2018-01-11 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
                                            rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = least_bit_hwi (ptr_bitmask);
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
          && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
          && talign > align)
        align = talign;
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* Make sure to return a sensible alignment when the multiplication
             by BITS_PER_UNIT overflowed.  */
          if (*alignp == 0)
            *alignp = 1u << (HOST_BITS_PER_INT - 1);
          /* We cannot really tell whether this result is an
             approximation.  */
          return false;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
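
/* As a worked illustration (a hypothetical pointer, not GCC code): if
   get_pointer_alignment_1 determines that P is always of the byte form
   8 * K + 4, it stores *ALIGNP == 8 * BITS_PER_UNIT and
   *BITPOSP == 4 * BITS_PER_UNIT, both measured in bits.
   get_pointer_alignment above then degrades the alignment to
   least_bit_hwi (bitpos), here 4 * BITS_PER_UNIT, since a pointer that is
   4 bytes past an 8-byte boundary is still known to be 4-byte aligned.  */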
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
        {
          const char *elt = (const char*) ptr + n;
          if (!*elt)
            break;
        }
    }
  else
    {
      for (n = 0; n < maxelts; n++)
        {
          const char *elt = (const char*) ptr + n * eltsize;
          if (!memcmp (elt, "\0\0\0\0", eltsize))
            break;
        }
    }
  return n;
}
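
/* A short worked example (assumed layout, not taken from a real target):
   on a little-endian target with 4-byte wchar_t the wide literal L"ab" is
   stored as the bytes 'a',0,0,0,'b',0,0,0 followed by a 4-byte NUL
   element, so string_length (ptr, 4, 2) compares one 4-byte element at a
   time and returns 2, just as it returns 5 for the plain literal "hello"
   with ELTSIZE == 1.  */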
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
        /* Return when an embedded null character is found.  */
        return NULL_TREE;

      if (!maxelts)
        return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, OPT_Warray_bounds,
                      "offset %qwi outside bounds of constant string",
                      eltoff);
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
                                maxelts - eltoff);

  return ssize_int (len);
}
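
/* Two illustrative cases (not an exhaustive contract): for "hello" + 2 the
   offset is the constant 2, so ELTOFF == 2 and the result is
   ssize_int (3); for "foo\0bar" + i with a variable i the early return
   above triggers, because string_length finds the embedded NUL at index 3
   and the length would depend on which side of that NUL the offset
   lands.  */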
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
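
/* For example (assuming the usual two endianness configurations): with
   BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN both false,
   c_readstr ("ab", SImode) yields the constant 0x00006261, since byte 0
   ('a', 0x61) lands in the least significant byte and, once the literal's
   terminating NUL is seen, CH stays zero so every remaining byte reads as
   zero without touching STR again.  On a big-endian target the same call
   yields 0x61620000.  */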
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
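
/* For example, a source-level use such as

     void *caller_pc = __builtin_return_address (1);

   reaches this expander with FNDECL_CODE == BUILT_IN_RETURN_ADDRESS and
   COUNT == 1: the loop above follows one link of the dynamic chain, after
   which the return address is read relative to that frame (or via
   RETURN_ADDR_RTX where the target defines it).  */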
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
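
/* To summarize the layout established above: the five-word buffer passed
   to __builtin_setjmp is used as

     word 0       frame pointer (targetm.builtin_setjmp_frame_value ())
     word 1       address of RECEIVER_LABEL
     words 2 ...  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp below reads these words back at the same
   offsets.  */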
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
        {
        case 0:
          /* This signifies an ellipsis; any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink; if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        case POINTER_TYPE:
          /* The actual argument must be nonnull when either the whole
             called function has been declared nonnull, or when the formal
             argument corresponding to the actual argument has been.  */
          if (argmap
              && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
            {
              arg = next_const_call_expr_arg (&iter);
              if (!validate_arg (arg, code) || integer_zerop (arg))
                goto end;
              break;
            }
          /* FALLTHRU */
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
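
/* As a usage sketch (mirroring the calls made by the expanders below):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, rejecting literal null pointers
   for parameters covered by attribute nonnull, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by any further arguments, as used for
   __builtin_prefetch.  */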
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
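
/* For instance, a source-level call such as

     __builtin_prefetch (&a[i + 8], 0, 3);

   reaches this expander with OP1 == 0 (read) and OP2 == 3 (maximal
   temporal locality, also the default); on a target without a prefetch
   pattern only the evaluation of the address argument survives, for its
   side effects.  */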
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its operand; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
          }
    }
  return size;
}
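
/* To illustrate the layout computed above on a hypothetical target with
   8-byte Pmode, a struct-value rtx, and two DImode argument registers:

     offset  0   incoming arg pointer
     offset  8   structure value address
     offset 16   first argument register
     offset 24   second argument register

   giving apply_args_size () == 32; each register slot is first rounded up
   to its mode's alignment before being appended.  */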
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (targetm.calls.function_value_regno_p (regno))
          {
            fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = adjust_address (result, mode, size);
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (mem, reg)
                            : gen_rtx_SET (reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

        emit_move_insn (adjust_address (registers, mode, size), tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
                                      crtl->args.pretend_args_size),
                       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
                      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
        && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1577 /* Perform an untyped call and save the state required to perform an
1578 untyped return of whatever value was returned by the given function. */
1580 static rtx
1581 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1583 int size, align, regno;
1584 fixed_size_mode mode;
1585 rtx incoming_args, result, reg, dest, src;
1586 rtx_call_insn *call_insn;
1587 rtx old_stack_level = 0;
1588 rtx call_fusage = 0;
1589 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1591 arguments = convert_memory_address (Pmode, arguments);
1593 /* Create a block where the return registers can be saved. */
1594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1596 /* Fetch the arg pointer from the ARGUMENTS block. */
1597 incoming_args = gen_reg_rtx (Pmode);
1598 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1599 if (!STACK_GROWS_DOWNWARD)
1600 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1601 incoming_args, 0, OPTAB_LIB_WIDEN);
1603 /* Push a new argument block and copy the arguments. Do not allow
1604 the (potential) memcpy call below to interfere with our stack
1605 manipulations. */
1606 do_pending_stack_adjust ();
1607 NO_DEFER_POP;
1609 /* Save the stack with nonlocal if available. */
1610 if (targetm.have_save_stack_nonlocal ())
1611 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1612 else
1613 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1615 /* Allocate a block of memory onto the stack and copy the memory
1616 arguments to the outgoing arguments address. We can pass TRUE
1617 as the 4th argument because we just saved the stack pointer
1618 and will restore it right after the call. */
1619 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1621 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1622 may have already set current_function_calls_alloca to true.
1623 current_function_calls_alloca won't be set if argsize is zero,
1624 so we have to guarantee need_drap is true here. */
1625 if (SUPPORTS_STACK_ALIGNMENT)
1626 crtl->need_drap = true;
1628 dest = virtual_outgoing_args_rtx;
1629 if (!STACK_GROWS_DOWNWARD)
1631 if (CONST_INT_P (argsize))
1632 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1633 else
1634 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1636 dest = gen_rtx_MEM (BLKmode, dest);
1637 set_mem_align (dest, PARM_BOUNDARY);
1638 src = gen_rtx_MEM (BLKmode, incoming_args);
1639 set_mem_align (src, PARM_BOUNDARY);
1640 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1642 /* Refer to the argument block. */
1643 apply_args_size ();
1644 arguments = gen_rtx_MEM (BLKmode, arguments);
1645 set_mem_align (arguments, PARM_BOUNDARY);
1647 /* Walk past the arg-pointer and structure value address. */
1648 size = GET_MODE_SIZE (Pmode);
1649 if (struct_value)
1650 size += GET_MODE_SIZE (Pmode);
1652 /* Restore each of the registers previously saved. Make USE insns
1653 for each of these registers for use in making the call. */
1654 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1655 if ((mode = apply_args_mode[regno]) != VOIDmode)
1657 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1658 if (size % align != 0)
1659 size = CEIL (size, align) * align;
1660 reg = gen_rtx_REG (mode, regno);
1661 emit_move_insn (reg, adjust_address (arguments, mode, size));
1662 use_reg (&call_fusage, reg);
1663 size += GET_MODE_SIZE (mode);
1666 /* Restore the structure value address unless this is passed as an
1667 "invisible" first argument. */
1668 size = GET_MODE_SIZE (Pmode);
1669 if (struct_value)
1671 rtx value = gen_reg_rtx (Pmode);
1672 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1673 emit_move_insn (struct_value, value);
1674 if (REG_P (struct_value))
1675 use_reg (&call_fusage, struct_value);
1676 size += GET_MODE_SIZE (Pmode);
1679 /* All arguments and registers used for the call are set up by now! */
1680 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1682 /* Ensure the address is valid. A SYMBOL_REF is already valid, so there is
1683 no need, and we don't want to load it into a register as an optimization,
1684 because prepare_call_address already did so if it was appropriate. */
1685 if (GET_CODE (function) != SYMBOL_REF)
1686 function = memory_address (FUNCTION_MODE, function);
1688 /* Generate the actual call instruction and save the return value. */
1689 if (targetm.have_untyped_call ())
1691 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1692 emit_call_insn (targetm.gen_untyped_call (mem, result,
1693 result_vector (1, result)));
1695 else if (targetm.have_call_value ())
1697 rtx valreg = 0;
1699 /* Locate the unique return register. It is not possible to
1700 express a call that sets more than one return register using
1701 call_value; use untyped_call for that. In fact, untyped_call
1702 only needs to save the return registers in the given block. */
1703 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704 if ((mode = apply_result_mode[regno]) != VOIDmode)
1706 gcc_assert (!valreg); /* have_untyped_call required. */
1708 valreg = gen_rtx_REG (mode, regno);
1711 emit_insn (targetm.gen_call_value (valreg,
1712 gen_rtx_MEM (FUNCTION_MODE, function),
1713 const0_rtx, NULL_RTX, const0_rtx));
1715 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1717 else
1718 gcc_unreachable ();
1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
1725 /* Restore the stack. */
1726 if (targetm.have_save_stack_nonlocal ())
1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1728 else
1729 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1730 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732 OK_DEFER_POP;
1734 /* Return the address of the result block. */
1735 result = copy_addr_to_reg (XEXP (result, 0));
1736 return convert_memory_address (ptr_mode, result);
1739 /* Perform an untyped return. */
1741 static void
1742 expand_builtin_return (rtx result)
1744 int size, align, regno;
1745 fixed_size_mode mode;
1746 rtx reg;
1747 rtx_insn *call_fusage = 0;
1749 result = convert_memory_address (Pmode, result);
1751 apply_result_size ();
1752 result = gen_rtx_MEM (BLKmode, result);
1754 if (targetm.have_untyped_return ())
1756 rtx vector = result_vector (0, result);
1757 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1758 emit_barrier ();
1759 return;
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1771 emit_move_insn (reg, adjust_address (result, mode, size));
1773 push_to_sequence (call_fusage);
1774 emit_use (reg);
1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1780 /* Put the USE insns before the return. */
1781 emit_insn (call_fusage);
1783 /* Return whatever values were restored by jumping directly to the end
1784 of the function. */
1785 expand_naked_return ();
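/* Usage sketch (illustrative, not from the original source): the
   untyped call/return machinery above implements GCC's forwarding
   extension, e.g.

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (res);

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size of the pushed argument block.  */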
1788 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790 static enum type_class
1791 type_to_class (tree type)
1793 switch (TREE_CODE (type))
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1816 /* Expand a call EXP to __builtin_classify_type. */
1818 static rtx
1819 expand_builtin_classify_type (tree exp)
1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1823 return GEN_INT (no_type_class);
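/* Example (illustrative): __builtin_classify_type folds to a constant
   from the type_class enumeration in typeclass.h, e.g.

     __builtin_classify_type (42)     ->  integer_type_class
     __builtin_classify_type (3.14)   ->  real_type_class
     __builtin_classify_type ("abc")  ->  pointer_type_class

   (the string argument undergoes the usual array-to-pointer decay).  */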
1826 /* This helper macro, meant to be used in mathfn_built_in below, determines
1827 which among a set of builtin math functions is appropriate for a given type
1828 mode. The 'F' (float) and 'L' (long double) variants are automatically
1829 generated from the 'double' case. If a function supports the _Float<N> and
1830 _Float<N>X types, additional variants with 'F32', 'F64', 'F128', etc.
1831 suffixes are considered as well. */
1832 #define CASE_MATHFN(MATHFN) \
1833 CASE_CFN_##MATHFN: \
1834 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1835 fcodel = BUILT_IN_##MATHFN##L ; break;
1836 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1837 types. */
1838 #define CASE_MATHFN_FLOATN(MATHFN) \
1839 CASE_CFN_##MATHFN: \
1840 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1841 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1842 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1843 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1844 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1845 break;
1846 /* Similar to above, but appends _R after any F/L suffix. */
1847 #define CASE_MATHFN_REENT(MATHFN) \
1848 case CFN_BUILT_IN_##MATHFN##_R: \
1849 case CFN_BUILT_IN_##MATHFN##F_R: \
1850 case CFN_BUILT_IN_##MATHFN##L_R: \
1851 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1852 fcodel = BUILT_IN_##MATHFN##L_R ; break;
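/* For reference (illustrative expansion): CASE_MATHFN (SQRT) expands to

     case CFN_SQRT:
     case CFN_BUILT_IN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   since CASE_CFN_SQRT covers both the internal-function and the
   built-in form of the combined function code.  */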
1854 /* Return a function equivalent to FN but operating on floating-point
1855 values of type TYPE, or END_BUILTINS if no such function exists.
1856 This is purely an operation on function codes; it does not guarantee
1857 that the target actually has an implementation of the function. */
1859 static built_in_function
1860 mathfn_built_in_2 (tree type, combined_fn fn)
1862 tree mtype;
1863 built_in_function fcode, fcodef, fcodel;
1864 built_in_function fcodef16 = END_BUILTINS;
1865 built_in_function fcodef32 = END_BUILTINS;
1866 built_in_function fcodef64 = END_BUILTINS;
1867 built_in_function fcodef128 = END_BUILTINS;
1868 built_in_function fcodef32x = END_BUILTINS;
1869 built_in_function fcodef64x = END_BUILTINS;
1870 built_in_function fcodef128x = END_BUILTINS;
1872 switch (fn)
1874 CASE_MATHFN (ACOS)
1875 CASE_MATHFN (ACOSH)
1876 CASE_MATHFN (ASIN)
1877 CASE_MATHFN (ASINH)
1878 CASE_MATHFN (ATAN)
1879 CASE_MATHFN (ATAN2)
1880 CASE_MATHFN (ATANH)
1881 CASE_MATHFN (CBRT)
1882 CASE_MATHFN_FLOATN (CEIL)
1883 CASE_MATHFN (CEXPI)
1884 CASE_MATHFN_FLOATN (COPYSIGN)
1885 CASE_MATHFN (COS)
1886 CASE_MATHFN (COSH)
1887 CASE_MATHFN (DREM)
1888 CASE_MATHFN (ERF)
1889 CASE_MATHFN (ERFC)
1890 CASE_MATHFN (EXP)
1891 CASE_MATHFN (EXP10)
1892 CASE_MATHFN (EXP2)
1893 CASE_MATHFN (EXPM1)
1894 CASE_MATHFN (FABS)
1895 CASE_MATHFN (FDIM)
1896 CASE_MATHFN_FLOATN (FLOOR)
1897 CASE_MATHFN_FLOATN (FMA)
1898 CASE_MATHFN_FLOATN (FMAX)
1899 CASE_MATHFN_FLOATN (FMIN)
1900 CASE_MATHFN (FMOD)
1901 CASE_MATHFN (FREXP)
1902 CASE_MATHFN (GAMMA)
1903 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1904 CASE_MATHFN (HUGE_VAL)
1905 CASE_MATHFN (HYPOT)
1906 CASE_MATHFN (ILOGB)
1907 CASE_MATHFN (ICEIL)
1908 CASE_MATHFN (IFLOOR)
1909 CASE_MATHFN (INF)
1910 CASE_MATHFN (IRINT)
1911 CASE_MATHFN (IROUND)
1912 CASE_MATHFN (ISINF)
1913 CASE_MATHFN (J0)
1914 CASE_MATHFN (J1)
1915 CASE_MATHFN (JN)
1916 CASE_MATHFN (LCEIL)
1917 CASE_MATHFN (LDEXP)
1918 CASE_MATHFN (LFLOOR)
1919 CASE_MATHFN (LGAMMA)
1920 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1921 CASE_MATHFN (LLCEIL)
1922 CASE_MATHFN (LLFLOOR)
1923 CASE_MATHFN (LLRINT)
1924 CASE_MATHFN (LLROUND)
1925 CASE_MATHFN (LOG)
1926 CASE_MATHFN (LOG10)
1927 CASE_MATHFN (LOG1P)
1928 CASE_MATHFN (LOG2)
1929 CASE_MATHFN (LOGB)
1930 CASE_MATHFN (LRINT)
1931 CASE_MATHFN (LROUND)
1932 CASE_MATHFN (MODF)
1933 CASE_MATHFN (NAN)
1934 CASE_MATHFN (NANS)
1935 CASE_MATHFN_FLOATN (NEARBYINT)
1936 CASE_MATHFN (NEXTAFTER)
1937 CASE_MATHFN (NEXTTOWARD)
1938 CASE_MATHFN (POW)
1939 CASE_MATHFN (POWI)
1940 CASE_MATHFN (POW10)
1941 CASE_MATHFN (REMAINDER)
1942 CASE_MATHFN (REMQUO)
1943 CASE_MATHFN_FLOATN (RINT)
1944 CASE_MATHFN_FLOATN (ROUND)
1945 CASE_MATHFN (SCALB)
1946 CASE_MATHFN (SCALBLN)
1947 CASE_MATHFN (SCALBN)
1948 CASE_MATHFN (SIGNBIT)
1949 CASE_MATHFN (SIGNIFICAND)
1950 CASE_MATHFN (SIN)
1951 CASE_MATHFN (SINCOS)
1952 CASE_MATHFN (SINH)
1953 CASE_MATHFN_FLOATN (SQRT)
1954 CASE_MATHFN (TAN)
1955 CASE_MATHFN (TANH)
1956 CASE_MATHFN (TGAMMA)
1957 CASE_MATHFN_FLOATN (TRUNC)
1958 CASE_MATHFN (Y0)
1959 CASE_MATHFN (Y1)
1960 CASE_MATHFN (YN)
1962 default:
1963 return END_BUILTINS;
1966 mtype = TYPE_MAIN_VARIANT (type);
1967 if (mtype == double_type_node)
1968 return fcode;
1969 else if (mtype == float_type_node)
1970 return fcodef;
1971 else if (mtype == long_double_type_node)
1972 return fcodel;
1973 else if (mtype == float16_type_node)
1974 return fcodef16;
1975 else if (mtype == float32_type_node)
1976 return fcodef32;
1977 else if (mtype == float64_type_node)
1978 return fcodef64;
1979 else if (mtype == float128_type_node)
1980 return fcodef128;
1981 else if (mtype == float32x_type_node)
1982 return fcodef32x;
1983 else if (mtype == float64x_type_node)
1984 return fcodef64x;
1985 else if (mtype == float128x_type_node)
1986 return fcodef128x;
1987 else
1988 return END_BUILTINS;
1991 /* Return the math function equivalent to FN but operating directly on TYPE,
1992 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1993 otherwise use the explicit declaration. If we can't do the conversion,
1994 return null. */
1996 static tree
1997 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1999 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2000 if (fcode2 == END_BUILTINS)
2001 return NULL_TREE;
2003 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2004 return NULL_TREE;
2006 return builtin_decl_explicit (fcode2);
2009 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2011 tree
2012 mathfn_built_in (tree type, combined_fn fn)
2014 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2017 /* Like mathfn_built_in_1, but take a built_in_function and
2018 always use the implicit builtin declarations. */
2020 tree
2021 mathfn_built_in (tree type, enum built_in_function fn)
2023 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
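/* Usage sketch (illustrative): to retype a call to sin for float,

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   returns the implicit declaration of sinf, or NULL_TREE when the
   language/target does not provide it implicitly.  */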
2026 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2027 return its code, otherwise return IFN_LAST. Note that this function
2028 only tests whether the function is defined in internals.def, not whether
2029 it is actually available on the target. */
2031 internal_fn
2032 associated_internal_fn (tree fndecl)
2034 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2035 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2036 switch (DECL_FUNCTION_CODE (fndecl))
2038 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2039 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2040 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2041 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2042 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2043 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2044 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2045 #include "internal-fn.def"
2047 CASE_FLT_FN (BUILT_IN_POW10):
2048 return IFN_EXP10;
2050 CASE_FLT_FN (BUILT_IN_DREM):
2051 return IFN_REMAINDER;
2053 CASE_FLT_FN (BUILT_IN_SCALBN):
2054 CASE_FLT_FN (BUILT_IN_SCALBLN):
2055 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2056 return IFN_LDEXP;
2057 return IFN_LAST;
2059 default:
2060 return IFN_LAST;
2064 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2065 on the current target by a call to an internal function, return the
2066 code of that internal function, otherwise return IFN_LAST. The caller
2067 is responsible for ensuring that any side-effects of the built-in
2068 call are dealt with correctly. E.g. if CALL sets errno, the caller
2069 must decide that the errno result isn't needed or make it available
2070 in some other way. */
2072 internal_fn
2073 replacement_internal_fn (gcall *call)
2075 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2077 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2078 if (ifn != IFN_LAST)
2080 tree_pair types = direct_internal_fn_types (ifn, call);
2081 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2082 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2083 return ifn;
2086 return IFN_LAST;
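/* Example (illustrative): given a GIMPLE call

     x = __builtin_sqrt (y);

   replacement_internal_fn returns IFN_SQRT when the corresponding
   optab is supported for y's mode in the block's optimization mode,
   allowing the statement to be rewritten as the internal call

     x = .SQRT (y);

   the caller must still deal with any errno side effect itself.  */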
2089 /* Expand a call to the builtin ternary math functions (fma).
2090 Return NULL_RTX if a normal call should be emitted rather than expanding the
2091 function in-line. EXP is the expression that is a call to the builtin
2092 function; if convenient, the result should be placed in TARGET.
2093 SUBTARGET may be used as the target for computing one of EXP's
2094 operands. */
2096 static rtx
2097 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2099 optab builtin_optab;
2100 rtx op0, op1, op2, result;
2101 rtx_insn *insns;
2102 tree fndecl = get_callee_fndecl (exp);
2103 tree arg0, arg1, arg2;
2104 machine_mode mode;
2106 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2107 return NULL_RTX;
2109 arg0 = CALL_EXPR_ARG (exp, 0);
2110 arg1 = CALL_EXPR_ARG (exp, 1);
2111 arg2 = CALL_EXPR_ARG (exp, 2);
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_FMA):
2116 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2117 builtin_optab = fma_optab; break;
2118 default:
2119 gcc_unreachable ();
2122 /* Make a suitable register to place result in. */
2123 mode = TYPE_MODE (TREE_TYPE (exp));
2125 /* Before working hard, check whether the instruction is available. */
2126 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2127 return NULL_RTX;
2129 result = gen_reg_rtx (mode);
2131 /* Always stabilize the argument list. */
2132 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2133 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2134 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2136 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2137 op1 = expand_normal (arg1);
2138 op2 = expand_normal (arg2);
2140 start_sequence ();
2142 /* Compute into RESULT.
2143 Set RESULT to wherever the result comes back. */
2144 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2145 result, 0);
2147 /* If we were unable to expand via the builtin, stop the sequence
2148 (without outputting the insns) and call the library function
2149 with the stabilized argument list. */
2150 if (result == 0)
2152 end_sequence ();
2153 return expand_call (exp, target, target == const0_rtx);
2156 /* Output the entire sequence. */
2157 insns = get_insns ();
2158 end_sequence ();
2159 emit_insn (insns);
2161 return result;
2164 /* Expand a call to the builtin sin and cos math functions.
2165 Return NULL_RTX if a normal call should be emitted rather than expanding the
2166 function in-line. EXP is the expression that is a call to the builtin
2167 function; if convenient, the result should be placed in TARGET.
2168 SUBTARGET may be used as the target for computing one of EXP's
2169 operands. */
2171 static rtx
2172 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2174 optab builtin_optab;
2175 rtx op0;
2176 rtx_insn *insns;
2177 tree fndecl = get_callee_fndecl (exp);
2178 machine_mode mode;
2179 tree arg;
2181 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2182 return NULL_RTX;
2184 arg = CALL_EXPR_ARG (exp, 0);
2186 switch (DECL_FUNCTION_CODE (fndecl))
2188 CASE_FLT_FN (BUILT_IN_SIN):
2189 CASE_FLT_FN (BUILT_IN_COS):
2190 builtin_optab = sincos_optab; break;
2191 default:
2192 gcc_unreachable ();
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2198 /* Check if the sincos insn is available; otherwise fall back
2199 to the sin or cos insn. */
2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2201 switch (DECL_FUNCTION_CODE (fndecl))
2203 CASE_FLT_FN (BUILT_IN_SIN):
2204 builtin_optab = sin_optab; break;
2205 CASE_FLT_FN (BUILT_IN_COS):
2206 builtin_optab = cos_optab; break;
2207 default:
2208 gcc_unreachable ();
2211 /* Before working hard, check whether the instruction is available. */
2212 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2214 rtx result = gen_reg_rtx (mode);
2216 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2217 need to expand the argument again. This way, we will not perform
2218 side-effects more than once. */
2219 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2221 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2223 start_sequence ();
2225 /* Compute into RESULT.
2226 Set RESULT to wherever the result comes back. */
2227 if (builtin_optab == sincos_optab)
2229 int ok;
2231 switch (DECL_FUNCTION_CODE (fndecl))
2233 CASE_FLT_FN (BUILT_IN_SIN):
2234 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2235 break;
2236 CASE_FLT_FN (BUILT_IN_COS):
2237 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2238 break;
2239 default:
2240 gcc_unreachable ();
2242 gcc_assert (ok);
2244 else
2245 result = expand_unop (mode, builtin_optab, op0, result, 0);
2247 if (result != 0)
2249 /* Output the entire sequence. */
2250 insns = get_insns ();
2251 end_sequence ();
2252 emit_insn (insns);
2253 return result;
2256 /* If we were unable to expand via the builtin, stop the sequence
2257 (without outputting the insns) and call the library function
2258 with the stabilized argument list. */
2259 end_sequence ();
2262 return expand_call (exp, target, target == const0_rtx);
2265 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2266 return an RTL instruction code that implements the functionality.
2267 If that isn't possible or available, return CODE_FOR_nothing. */
2269 static enum insn_code
2270 interclass_mathfn_icode (tree arg, tree fndecl)
2272 bool errno_set = false;
2273 optab builtin_optab = unknown_optab;
2274 machine_mode mode;
2276 switch (DECL_FUNCTION_CODE (fndecl))
2278 CASE_FLT_FN (BUILT_IN_ILOGB):
2279 errno_set = true; builtin_optab = ilogb_optab; break;
2280 CASE_FLT_FN (BUILT_IN_ISINF):
2281 builtin_optab = isinf_optab; break;
2282 case BUILT_IN_ISNORMAL:
2283 case BUILT_IN_ISFINITE:
2284 CASE_FLT_FN (BUILT_IN_FINITE):
2285 case BUILT_IN_FINITED32:
2286 case BUILT_IN_FINITED64:
2287 case BUILT_IN_FINITED128:
2288 case BUILT_IN_ISINFD32:
2289 case BUILT_IN_ISINFD64:
2290 case BUILT_IN_ISINFD128:
2291 /* These builtins have no optabs (yet). */
2292 break;
2293 default:
2294 gcc_unreachable ();
2297 /* There's no easy way to detect the case we need to set EDOM. */
2298 if (flag_errno_math && errno_set)
2299 return CODE_FOR_nothing;
2301 /* Optab mode depends on the mode of the input argument. */
2302 mode = TYPE_MODE (TREE_TYPE (arg));
2304 if (builtin_optab)
2305 return optab_handler (builtin_optab, mode);
2306 return CODE_FOR_nothing;
2309 /* Expand a call to one of the builtin math functions that operate on
2310 a floating-point argument and produce an integer result (ilogb, isinf,
2311 isnan, etc.).
2312 Return 0 if a normal call should be emitted rather than expanding the
2313 function in-line. EXP is the expression that is a call to the builtin
2314 function; if convenient, the result should be placed in TARGET. */
2316 static rtx
2317 expand_builtin_interclass_mathfn (tree exp, rtx target)
2319 enum insn_code icode = CODE_FOR_nothing;
2320 rtx op0;
2321 tree fndecl = get_callee_fndecl (exp);
2322 machine_mode mode;
2323 tree arg;
2325 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2326 return NULL_RTX;
2328 arg = CALL_EXPR_ARG (exp, 0);
2329 icode = interclass_mathfn_icode (arg, fndecl);
2330 mode = TYPE_MODE (TREE_TYPE (arg));
2332 if (icode != CODE_FOR_nothing)
2334 struct expand_operand ops[1];
2335 rtx_insn *last = get_last_insn ();
2336 tree orig_arg = arg;
2338 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2339 need to expand the argument again. This way, we will not perform
2340 side-effects more than once. */
2341 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2343 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2345 if (mode != GET_MODE (op0))
2346 op0 = convert_to_mode (mode, op0, 0);
2348 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2349 if (maybe_legitimize_operands (icode, 0, 1, ops)
2350 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2351 return ops[0].value;
2353 delete_insns_since (last);
2354 CALL_EXPR_ARG (exp, 0) = orig_arg;
2357 return NULL_RTX;
2360 /* Expand a call to the builtin sincos math function.
2361 Return NULL_RTX if a normal call should be emitted rather than expanding the
2362 function in-line. EXP is the expression that is a call to the builtin
2363 function. */
2365 static rtx
2366 expand_builtin_sincos (tree exp)
2368 rtx op0, op1, op2, target1, target2;
2369 machine_mode mode;
2370 tree arg, sinp, cosp;
2371 int result;
2372 location_t loc = EXPR_LOCATION (exp);
2373 tree alias_type, alias_off;
2375 if (!validate_arglist (exp, REAL_TYPE,
2376 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2377 return NULL_RTX;
2379 arg = CALL_EXPR_ARG (exp, 0);
2380 sinp = CALL_EXPR_ARG (exp, 1);
2381 cosp = CALL_EXPR_ARG (exp, 2);
2383 /* Make a suitable register to place result in. */
2384 mode = TYPE_MODE (TREE_TYPE (arg));
2386 /* Check if sincos insn is available, otherwise emit the call. */
2387 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2388 return NULL_RTX;
2390 target1 = gen_reg_rtx (mode);
2391 target2 = gen_reg_rtx (mode);
2393 op0 = expand_normal (arg);
2394 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2395 alias_off = build_int_cst (alias_type, 0);
2396 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2397 sinp, alias_off));
2398 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2399 cosp, alias_off));
2401 /* Compute into target1 and target2.
2402 Set TARGET to wherever the result comes back. */
2403 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2404 gcc_assert (result);
2406 /* Move target1 and target2 to the memory locations indicated
2407 by op1 and op2. */
2408 emit_move_insn (op1, target1);
2409 emit_move_insn (op2, target2);
2411 return const0_rtx;
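/* Example (illustrative): on a target providing an insn for
   sincos_optab, a source-level call

     sincos (x, &s, &c);

   expands to a single insn computing both values into registers,
   followed by two stores through the sinp/cosp pointers; otherwise
   the NULL_RTX above makes the caller emit a normal libcall.  */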
2414 /* Expand a call to the internal cexpi builtin to the sincos math function.
2415 EXP is the expression that is a call to the builtin function; if convenient,
2416 the result should be placed in TARGET. */
2418 static rtx
2419 expand_builtin_cexpi (tree exp, rtx target)
2421 tree fndecl = get_callee_fndecl (exp);
2422 tree arg, type;
2423 machine_mode mode;
2424 rtx op0, op1, op2;
2425 location_t loc = EXPR_LOCATION (exp);
2427 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2428 return NULL_RTX;
2430 arg = CALL_EXPR_ARG (exp, 0);
2431 type = TREE_TYPE (arg);
2432 mode = TYPE_MODE (TREE_TYPE (arg));
2434 /* Try expanding via a sincos optab, falling back to emitting a libcall
2435 to sincos or cexp. We are sure to have one of those because cexpi
2436 is only generated from sincos or cexp, or when we have either of them. */
2437 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2439 op1 = gen_reg_rtx (mode);
2440 op2 = gen_reg_rtx (mode);
2442 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2444 /* Compute into op1 and op2. */
2445 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2447 else if (targetm.libc_has_function (function_sincos))
2449 tree call, fn = NULL_TREE;
2450 tree top1, top2;
2451 rtx op1a, op2a;
2453 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2456 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2457 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2458 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2459 else
2460 gcc_unreachable ();
2462 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2463 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2464 op1a = copy_addr_to_reg (XEXP (op1, 0));
2465 op2a = copy_addr_to_reg (XEXP (op2, 0));
2466 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2467 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2469 /* Make sure not to fold the sincos call again. */
2470 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2471 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2472 call, 3, arg, top1, top2));
2474 else
2476 tree call, fn = NULL_TREE, narg;
2477 tree ctype = build_complex_type (type);
2479 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2482 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2483 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2484 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2485 else
2486 gcc_unreachable ();
2488 /* If we don't have a decl for cexp, create one. This is the
2489 friendliest fallback if the user calls __builtin_cexpi
2490 on a target without full C99 function support. */
2491 if (fn == NULL_TREE)
2493 tree fntype;
2494 const char *name = NULL;
2496 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2497 name = "cexpf";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2499 name = "cexp";
2500 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2501 name = "cexpl";
2503 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2504 fn = build_fn_decl (name, fntype);
2507 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2508 build_real (type, dconst0), arg);
2510 /* Make sure not to fold the cexp call again. */
2511 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2512 return expand_expr (build_call_nary (ctype, call, 1, narg),
2513 target, VOIDmode, EXPAND_NORMAL);
2516 /* Now build the proper return type. */
2517 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2518 make_tree (TREE_TYPE (arg), op2),
2519 make_tree (TREE_TYPE (arg), op1)),
2520 target, VOIDmode, EXPAND_NORMAL);
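/* Fallback chain (illustrative): for __builtin_cexpi (x) of type
   double the function tries, in order:

     1. the sincos_optab insn, combining the two results;
     2. a call  sincos (x, &s, &c)  when libc provides sincos;
     3. a call  cexp (0.0 + x*1i)   otherwise.

   In every case the value returned is cos (x) + sin (x)*1i.  */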
2523 /* Conveniently construct a function call expression. FNDECL names the
2524 function to be called, N is the number of arguments, and the "..."
2525 parameters are the argument expressions. Unlike build_call_expr
2526 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2528 static tree
2529 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2531 va_list ap;
2532 tree fntype = TREE_TYPE (fndecl);
2533 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2535 va_start (ap, n);
2536 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2537 va_end (ap);
2538 SET_EXPR_LOCATION (fn, loc);
2539 return fn;
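/* Usage sketch (illustrative): the fallback paths below use it as in

     exp = build_call_nofold_loc (loc, fallback_fndecl, 1, arg);

   to obtain a CALL_EXPR that reaches expand_call as-is instead of
   being folded back into the very builtin that is being lowered.  */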
2542 /* Expand a call to one of the builtin rounding functions gcc defines
2543 as an extension (lfloor and lceil). As these are gcc extensions we
2544 do not need to worry about setting errno to EDOM.
2545 If expanding via optab fails, lower expression to (int)(floor(x)).
2546 EXP is the expression that is a call to the builtin function;
2547 if convenient, the result should be placed in TARGET. */
2549 static rtx
2550 expand_builtin_int_roundingfn (tree exp, rtx target)
2552 convert_optab builtin_optab;
2553 rtx op0, tmp;
2554 rtx_insn *insns;
2555 tree fndecl = get_callee_fndecl (exp);
2556 enum built_in_function fallback_fn;
2557 tree fallback_fndecl;
2558 machine_mode mode;
2559 tree arg;
2561 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2562 gcc_unreachable ();
2564 arg = CALL_EXPR_ARG (exp, 0);
2566 switch (DECL_FUNCTION_CODE (fndecl))
2568 CASE_FLT_FN (BUILT_IN_ICEIL):
2569 CASE_FLT_FN (BUILT_IN_LCEIL):
2570 CASE_FLT_FN (BUILT_IN_LLCEIL):
2571 builtin_optab = lceil_optab;
2572 fallback_fn = BUILT_IN_CEIL;
2573 break;
2575 CASE_FLT_FN (BUILT_IN_IFLOOR):
2576 CASE_FLT_FN (BUILT_IN_LFLOOR):
2577 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2578 builtin_optab = lfloor_optab;
2579 fallback_fn = BUILT_IN_FLOOR;
2580 break;
2582 default:
2583 gcc_unreachable ();
2586 /* Make a suitable register to place result in. */
2587 mode = TYPE_MODE (TREE_TYPE (exp));
2589 target = gen_reg_rtx (mode);
2591 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2592 need to expand the argument again. This way, we will not perform
2593 side-effects more than once. */
2594 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2596 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2598 start_sequence ();
2600 /* Compute into TARGET. */
2601 if (expand_sfix_optab (target, op0, builtin_optab))
2603 /* Output the entire sequence. */
2604 insns = get_insns ();
2605 end_sequence ();
2606 emit_insn (insns);
2607 return target;
2610 /* If we were unable to expand via the builtin, stop the sequence
2611 (without outputting the insns). */
2612 end_sequence ();
2614 /* Fall back to floating point rounding optab. */
2615 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2617 /* For non-C99 targets we may end up without a fallback fndecl here
2618 if the user called __builtin_lfloor directly. In this case emit
2619 a call to the floor/ceil variants nevertheless. This should give
2620 the best user experience on targets without full C99 support. */
2621 if (fallback_fndecl == NULL_TREE)
2623 tree fntype;
2624 const char *name = NULL;
2626 switch (DECL_FUNCTION_CODE (fndecl))
2628 case BUILT_IN_ICEIL:
2629 case BUILT_IN_LCEIL:
2630 case BUILT_IN_LLCEIL:
2631 name = "ceil";
2632 break;
2633 case BUILT_IN_ICEILF:
2634 case BUILT_IN_LCEILF:
2635 case BUILT_IN_LLCEILF:
2636 name = "ceilf";
2637 break;
2638 case BUILT_IN_ICEILL:
2639 case BUILT_IN_LCEILL:
2640 case BUILT_IN_LLCEILL:
2641 name = "ceill";
2642 break;
2643 case BUILT_IN_IFLOOR:
2644 case BUILT_IN_LFLOOR:
2645 case BUILT_IN_LLFLOOR:
2646 name = "floor";
2647 break;
2648 case BUILT_IN_IFLOORF:
2649 case BUILT_IN_LFLOORF:
2650 case BUILT_IN_LLFLOORF:
2651 name = "floorf";
2652 break;
2653 case BUILT_IN_IFLOORL:
2654 case BUILT_IN_LFLOORL:
2655 case BUILT_IN_LLFLOORL:
2656 name = "floorl";
2657 break;
2658 default:
2659 gcc_unreachable ();
2662 fntype = build_function_type_list (TREE_TYPE (arg),
2663 TREE_TYPE (arg), NULL_TREE);
2664 fallback_fndecl = build_fn_decl (name, fntype);
2667 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2669 tmp = expand_normal (exp);
2670 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2672 /* Truncate the result of floating point optab to integer
2673 via expand_fix (). */
2674 target = gen_reg_rtx (mode);
2675 expand_fix (target, tmp, 0);
2677 return target;
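/* Lowering sketch (illustrative): for  long l = __builtin_lfloor (x);
   the function first tries the lfloor_optab conversion insn; failing
   that, it emits the equivalent of

     l = (long) floor (x);

   i.e. a call to the floor variant followed by expand_fix.  */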
2680 /* Expand a call to one of the builtin math functions doing integer
2681 conversion (lrint).
2682 Return 0 if a normal call should be emitted rather than expanding the
2683 function in-line. EXP is the expression that is a call to the builtin
2684 function; if convenient, the result should be placed in TARGET. */
2686 static rtx
2687 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2689 convert_optab builtin_optab;
2690 rtx op0;
2691 rtx_insn *insns;
2692 tree fndecl = get_callee_fndecl (exp);
2693 tree arg;
2694 machine_mode mode;
2695 enum built_in_function fallback_fn = BUILT_IN_NONE;
2697 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2698 gcc_unreachable ();
2700 arg = CALL_EXPR_ARG (exp, 0);
2702 switch (DECL_FUNCTION_CODE (fndecl))
2704 CASE_FLT_FN (BUILT_IN_IRINT):
2705 fallback_fn = BUILT_IN_LRINT;
2706 gcc_fallthrough ();
2707 CASE_FLT_FN (BUILT_IN_LRINT):
2708 CASE_FLT_FN (BUILT_IN_LLRINT):
2709 builtin_optab = lrint_optab;
2710 break;
2712 CASE_FLT_FN (BUILT_IN_IROUND):
2713 fallback_fn = BUILT_IN_LROUND;
2714 gcc_fallthrough ();
2715 CASE_FLT_FN (BUILT_IN_LROUND):
2716 CASE_FLT_FN (BUILT_IN_LLROUND):
2717 builtin_optab = lround_optab;
2718 break;
2720 default:
2721 gcc_unreachable ();
2724 /* There's no easy way to detect the case we need to set EDOM. */
2725 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2726 return NULL_RTX;
2728 /* Make a suitable register to place result in. */
2729 mode = TYPE_MODE (TREE_TYPE (exp));
2731 /* When errno does not need to be set, expand directly via the optab. */
2732 if (!flag_errno_math)
2734 rtx result = gen_reg_rtx (mode);
2736 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2737 need to expand the argument again. This way, we will not perform
2738 side-effects more than once. */
2739 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2741 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2743 start_sequence ();
2745 if (expand_sfix_optab (result, op0, builtin_optab))
2747 /* Output the entire sequence. */
2748 insns = get_insns ();
2749 end_sequence ();
2750 emit_insn (insns);
2751 return result;
2754 /* If we were unable to expand via the builtin, stop the sequence
2755 (without outputting the insns) and call the library function
2756 with the stabilized argument list. */
2757 end_sequence ();
2760 if (fallback_fn != BUILT_IN_NONE)
2762 /* Fall back to rounding to long int. Use implicit_p 0: on non-C99
2763 targets, (int) round (x) should never be transformed into
2764 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2765 a call to lround in the hope that the target provides at least some
2766 C99 functions. This should give the best user experience on
2767 targets without full C99 support. */
2768 tree fallback_fndecl = mathfn_built_in_1
2769 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2771 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2772 fallback_fndecl, 1, arg);
2774 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2775 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2776 return convert_to_mode (mode, target, 0);
2779 return expand_call (exp, target, target == const0_rtx);
2782 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2783 a normal call should be emitted rather than expanding the function
2784 in-line. EXP is the expression that is a call to the builtin
2785 function; if convenient, the result should be placed in TARGET. */
2787 static rtx
2788 expand_builtin_powi (tree exp, rtx target)
2790 tree arg0, arg1;
2791 rtx op0, op1;
2792 machine_mode mode;
2793 machine_mode mode2;
2795 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2796 return NULL_RTX;
2798 arg0 = CALL_EXPR_ARG (exp, 0);
2799 arg1 = CALL_EXPR_ARG (exp, 1);
2800 mode = TYPE_MODE (TREE_TYPE (exp));
2802 /* Emit a libcall to libgcc. */
2804 /* Mode of the 2nd argument must match that of an int. */
2805 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2807 if (target == NULL_RTX)
2808 target = gen_reg_rtx (mode);
2810 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2811 if (GET_MODE (op0) != mode)
2812 op0 = convert_to_mode (mode, op0, 0);
2813 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2814 if (GET_MODE (op1) != mode2)
2815 op1 = convert_to_mode (mode2, op1, 0);
2817 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2818 target, LCT_CONST, mode,
2819 op0, mode, op1, mode2);
2821 return target;
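/* Example (illustrative): __builtin_powi (x, n) for double becomes a
   libgcc call, conventionally

     double __powidf2 (double x, int n);

   the exact libfunc comes from optab_libfunc (powi_optab, mode).  */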
2824 /* Expand expression EXP which is a call to the strlen builtin. Return
2825 NULL_RTX if we failed; the caller should then emit a normal call.
2826 Otherwise try to get the result in TARGET, if convenient. */
2828 static rtx
2829 expand_builtin_strlen (tree exp, rtx target,
2830 machine_mode target_mode)
2832 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2833 return NULL_RTX;
2835 struct expand_operand ops[4];
2836 rtx pat;
2837 tree len;
2838 tree src = CALL_EXPR_ARG (exp, 0);
2839 rtx src_reg;
2840 rtx_insn *before_strlen;
2841 machine_mode insn_mode;
2842 enum insn_code icode = CODE_FOR_nothing;
2843 unsigned int align;
2845 /* If the length can be computed at compile-time, return it. */
2846 len = c_strlen (src, 0);
2847 if (len)
2848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2850 /* If the length can be computed at compile time and is a constant
2851 integer, but there are side effects in src, evaluate
2852 src for its side effects, then return len.
2853 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2854 can be optimized into: i++; x = 3; */
2855 len = c_strlen (src, 1);
2856 if (len && TREE_CODE (len) == INTEGER_CST)
2858 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2859 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2862 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2864 /* If SRC is not a pointer type, don't do this operation inline. */
2865 if (align == 0)
2866 return NULL_RTX;
2868 /* Bail out if we can't compute strlen in the right mode. */
2869 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2871 icode = optab_handler (strlen_optab, insn_mode);
2872 if (icode != CODE_FOR_nothing)
2873 break;
2875 if (insn_mode == VOIDmode)
2876 return NULL_RTX;
2878 /* Make a place to hold the source address. We will not expand
2879 the actual source until we are sure that the expansion will
2880 not fail -- there are trees that cannot be expanded twice. */
2881 src_reg = gen_reg_rtx (Pmode);
2883 /* Mark the beginning of the strlen sequence so we can emit the
2884 source operand later. */
2885 before_strlen = get_last_insn ();
2887 create_output_operand (&ops[0], target, insn_mode);
2888 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2889 create_integer_operand (&ops[2], 0);
2890 create_integer_operand (&ops[3], align);
2891 if (!maybe_expand_insn (icode, 4, ops))
2892 return NULL_RTX;
2894 /* Check to see if the argument was declared attribute nonstring
2895 and if so, issue a warning since at this point it's not known
2896 to be nul-terminated. */
2897 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2899 /* Now that we are assured of success, expand the source. */
2900 start_sequence ();
2901 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2902 if (pat != src_reg)
2904 #ifdef POINTERS_EXTEND_UNSIGNED
2905 if (GET_MODE (pat) != Pmode)
2906 pat = convert_to_mode (Pmode, pat,
2907 POINTERS_EXTEND_UNSIGNED);
2908 #endif
2909 emit_move_insn (src_reg, pat);
2911 pat = get_insns ();
2912 end_sequence ();
2914 if (before_strlen)
2915 emit_insn_after (pat, before_strlen);
2916 else
2917 emit_insn_before (pat, get_insns ());
2919 /* Return the value in the proper mode for this function. */
2920 if (GET_MODE (ops[0].value) == target_mode)
2921 target = ops[0].value;
2922 else if (target != 0)
2923 convert_move (target, ops[0].value, 0);
2924 else
2925 target = convert_to_mode (target_mode, ops[0].value, 0);
2927 return target;
2930 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2931 bytes from the constant string DATA + OFFSET and return them as a
2932 target constant. */
2934 static rtx
2935 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2936 scalar_int_mode mode)
2938 const char *str = (const char *) data;
2940 gcc_assert (offset >= 0
2941 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2942 <= strlen (str) + 1));
2944 return c_readstr (str + offset, mode);
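/* Example (illustrative): with DATA = "abcdef" and a 4-byte MODE,
   OFFSET 1 yields the target constant holding the bytes "bcde", i.e.
   what a 4-byte load from that position in the string would produce;
   the assert guards against reading past the terminating nul.  */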
2947 /* LEN specifies the length of the block for the memcpy/memset operation.
2948 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
2949 In some cases we can make a very likely guess at the maximum size,
2950 which we then store in PROBABLE_MAX_SIZE. */
2952 static void
2953 determine_block_size (tree len, rtx len_rtx,
2954 unsigned HOST_WIDE_INT *min_size,
2955 unsigned HOST_WIDE_INT *max_size,
2956 unsigned HOST_WIDE_INT *probable_max_size)
2958 if (CONST_INT_P (len_rtx))
2960 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2961 return;
2963 else
2965 wide_int min, max;
2966 enum value_range_type range_type = VR_UNDEFINED;
2968 /* Determine bounds from the type. */
2969 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2970 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2971 else
2972 *min_size = 0;
2973 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2974 *probable_max_size = *max_size
2975 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2976 else
2977 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2979 if (TREE_CODE (len) == SSA_NAME)
2980 range_type = get_range_info (len, &min, &max);
2981 if (range_type == VR_RANGE)
2983 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2984 *min_size = min.to_uhwi ();
2985 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2986 *probable_max_size = *max_size = max.to_uhwi ();
2988 else if (range_type == VR_ANTI_RANGE)
2990 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2991 if (min == 0)
2993 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2994 *min_size = max.to_uhwi () + 1;
2996 /* Code like
2998 int n;
2999 if (n < 100)
3000 memcpy (a, b, n)
3002 produces an anti-range allowing negative values of N. We can
3003 still use that information and guess that N is not negative. */
3005 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3006 *probable_max_size = min.to_uhwi () - 1;
3009 gcc_checking_assert (*max_size <=
3010 (unsigned HOST_WIDE_INT)
3011 GET_MODE_MASK (GET_MODE (len_rtx)));
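/* Example (illustrative): for an SSA length n_1 with recorded range
   [1, 64], a call  memcpy (d, s, n_1)  gets MIN_SIZE = 1 and
   MAX_SIZE = PROBABLE_MAX_SIZE = 64, letting the block-move expander
   choose a strategy without guarding against huge sizes at runtime.  */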
3014 /* Try to verify that the sizes and lengths of the arguments to a string
3015 manipulation function given by EXP are within valid bounds and that
3016 the operation does not lead to buffer overflow or read past the end.
3017 Arguments other than EXP may be null. When non-null, the arguments
3018 have the following meaning:
3019 DST is the destination of a copy call or NULL otherwise.
3020 SRC is the source of a copy call or NULL otherwise.
3021 DSTWRITE is the number of bytes written into the destination obtained
3022 from the user-supplied size argument to the function (such as in
3023 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3024 MAXREAD is the user-supplied bound on the length of the source sequence
3025 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3026 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3027 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3028 expression EXP is a string function call (as opposed to a memory call
3029 like memcpy). As an exception, SRCSTR can also be an integer denoting
3030 the precomputed size of the source string or object (for functions like
3031 memcpy).
3032 DSTSIZE is the size of the destination object specified by the last
3033 argument to the _chk builtins, typically resulting from the expansion
3034 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3035 DSTSIZE).
3037 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3038 SIZE_MAX.
3040 If the call is successfully verified as safe return true, otherwise
3041 return false. */
3043 static bool
3044 check_access (tree exp, tree, tree, tree dstwrite,
3045 tree maxread, tree srcstr, tree dstsize)
3047 int opt = OPT_Wstringop_overflow_;
3049 /* The size of the largest object is half the address space, or
3050 PTRDIFF_MAX. (This is way too permissive.) */
3051 tree maxobjsize = max_object_size ();
3053 /* Either the length of the source string for string functions or
3054 the size of the source object for raw memory functions. */
3055 tree slen = NULL_TREE;
3057 tree range[2] = { NULL_TREE, NULL_TREE };
3059 /* Set to true when the exact number of bytes written by a string
3060 function like strcpy is not known and the only thing that is
3061 known is that it must be at least one (for the terminating nul). */
3062 bool at_least_one = false;
3063 if (srcstr)
3065 /* SRCSTR is normally a pointer to string but as a special case
3066 it can be an integer denoting the length of a string. */
3067 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3069 /* Try to determine the range of lengths the source string
3070 refers to. If it can be determined and is less than
3071 the upper bound given by MAXREAD add one to it for
3072 the terminating nul. Otherwise, set it to one for
3073 the same reason, or to MAXREAD as appropriate. */
3074 get_range_strlen (srcstr, range);
3075 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3077 if (maxread && tree_int_cst_le (maxread, range[0]))
3078 range[0] = range[1] = maxread;
3079 else
3080 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3081 range[0], size_one_node);
3083 if (maxread && tree_int_cst_le (maxread, range[1]))
3084 range[1] = maxread;
3085 else if (!integer_all_onesp (range[1]))
3086 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3087 range[1], size_one_node);
3089 slen = range[0];
3091 else
3093 at_least_one = true;
3094 slen = size_one_node;
3097 else
3098 slen = srcstr;
3101 if (!dstwrite && !maxread)
3103 /* When the only available piece of data is the object size
3104 there is nothing to do. */
3105 if (!slen)
3106 return true;
3108 /* Otherwise, when the length of the source sequence is known
3109 (as with strlen), set DSTWRITE to it. */
3110 if (!range[0])
3111 dstwrite = slen;
3114 if (!dstsize)
3115 dstsize = maxobjsize;
3117 if (dstwrite)
3118 get_size_range (dstwrite, range);
3120 tree func = get_callee_fndecl (exp);
3122 /* First check the number of bytes to be written against the maximum
3123 object size. */
3124 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3126 location_t loc = tree_nonartificial_location (exp);
3127 loc = expansion_point_location_if_in_system_header (loc);
3129 if (range[0] == range[1])
3130 warning_at (loc, opt,
3131 "%K%qD specified size %E "
3132 "exceeds maximum object size %E",
3133 exp, func, range[0], maxobjsize);
3134 else
3135 warning_at (loc, opt,
3136 "%K%qD specified size between %E and %E "
3137 "exceeds maximum object size %E",
3138 exp, func,
3139 range[0], range[1], maxobjsize);
3140 return false;
3143 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3144 constant, and in range of unsigned HOST_WIDE_INT. */
3145 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3147 /* Next check the number of bytes to be written against the destination
3148 object size. */
3149 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3151 if (range[0]
3152 && ((tree_fits_uhwi_p (dstsize)
3153 && tree_int_cst_lt (dstsize, range[0]))
3154 || (tree_fits_uhwi_p (dstwrite)
3155 && tree_int_cst_lt (dstwrite, range[0]))))
3157 location_t loc = tree_nonartificial_location (exp);
3158 loc = expansion_point_location_if_in_system_header (loc);
3160 if (dstwrite == slen && at_least_one)
3162 /* This is a call to strcpy with a destination of 0 size
3163 and a source of unknown length. The call will write
3164 at least one byte past the end of the destination. */
3165 warning_at (loc, opt,
3166 "%K%qD writing %E or more bytes into a region "
3167 "of size %E overflows the destination",
3168 exp, func, range[0], dstsize);
3170 else if (tree_int_cst_equal (range[0], range[1]))
3171 warning_at (loc, opt,
3172 (integer_onep (range[0])
3173 ? G_("%K%qD writing %E byte into a region "
3174 "of size %E overflows the destination")
3175 : G_("%K%qD writing %E bytes into a region "
3176 "of size %E overflows the destination")),
3177 exp, func, range[0], dstsize);
3178 else if (tree_int_cst_sign_bit (range[1]))
3180 /* Avoid printing the upper bound if it's invalid. */
3181 warning_at (loc, opt,
3182 "%K%qD writing %E or more bytes into a region "
3183 "of size %E overflows the destination",
3184 exp, func, range[0], dstsize);
3186 else
3187 warning_at (loc, opt,
3188 "%K%qD writing between %E and %E bytes into "
3189 "a region of size %E overflows the destination",
3190 exp, func, range[0], range[1],
3191 dstsize);
3193 /* Return error when an overflow has been detected. */
3194 return false;
3198 /* Check the maximum length of the source sequence against the size
3199 of the destination object if known, or against the maximum size
3200 of an object. */
3201 if (maxread)
3203 get_size_range (maxread, range);
3205 /* Use the lower end for MAXREAD from now on. */
3206 if (range[0])
3207 maxread = range[0];
3209 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3211 location_t loc = tree_nonartificial_location (exp);
3212 loc = expansion_point_location_if_in_system_header (loc);
3214 if (tree_int_cst_lt (maxobjsize, range[0]))
3216 /* Warn about crazy big sizes first since that's more
3217 likely to be meaningful than saying that the bound
3218 is greater than the object size if both are big. */
3219 if (range[0] == range[1])
3220 warning_at (loc, opt,
3221 "%K%qD specified bound %E "
3222 "exceeds maximum object size %E",
3223 exp, func,
3224 range[0], maxobjsize);
3225 else
3226 warning_at (loc, opt,
3227 "%K%qD specified bound between %E and %E "
3228 "exceeds maximum object size %E",
3229 exp, func,
3230 range[0], range[1], maxobjsize);
3232 return false;
3235 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3237 if (tree_int_cst_equal (range[0], range[1]))
3238 warning_at (loc, opt,
3239 "%K%qD specified bound %E "
3240 "exceeds destination size %E",
3241 exp, func,
3242 range[0], dstsize);
3243 else
3244 warning_at (loc, opt,
3245 "%K%qD specified bound between %E and %E "
3246 "exceeds destination size %E",
3247 exp, func,
3248 range[0], range[1], dstsize);
3249 return false;
3254 /* Check for reading past the end of SRC. */
3255 if (slen
3256 && slen == srcstr
3257 && dstwrite && range[0]
3258 && tree_int_cst_lt (slen, range[0]))
3260 location_t loc = tree_nonartificial_location (exp);
3262 if (tree_int_cst_equal (range[0], range[1]))
3263 warning_at (loc, opt,
3264 (tree_int_cst_equal (range[0], integer_one_node)
3265 ? G_("%K%qD reading %E byte from a region of size %E")
3266 : G_("%K%qD reading %E bytes from a region of size %E")),
3267 exp, func, range[0], slen);
3268 else if (tree_int_cst_sign_bit (range[1]))
3270 /* Avoid printing the upper bound if it's invalid. */
3271 warning_at (loc, opt,
3272 "%K%qD reading %E or more bytes from a region "
3273 "of size %E",
3274 exp, func, range[0], slen);
3276 else
3277 warning_at (loc, opt,
3278 "%K%qD reading between %E and %E bytes from a region "
3279 "of size %E",
3280 exp, func, range[0], range[1], slen);
3281 return false;
3284 return true;
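/* Example (illustrative): for

     char d[4];
     strcpy (d, "overflow");

   the source length range is [8, 8], one is added for the terminating
   nul, DSTSIZE is 4, and the function issues "writing 9 bytes into a
   region of size 4 overflows the destination" and returns false.  */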
3287 /* Helper to compute the size of the object referenced by the DEST
3288 expression which must have pointer type, using Object Size type
3289 OSTYPE (only the least significant 2 bits are used). Return
3290 an estimate of the size of the object if successful or NULL when
3291 the size cannot be determined. When the referenced object involves
3292 a non-constant offset in some range the returned value represents
3293 the largest size given the smallest non-negative offset in the
3294 range. The function is intended for diagnostics and should not
3295 be used to influence code generation or optimization. */
3297 tree
3298 compute_objsize (tree dest, int ostype)
3300 unsigned HOST_WIDE_INT size;
3302 /* Only the two least significant bits are meaningful. */
3303 ostype &= 3;
3305 if (compute_builtin_object_size (dest, ostype, &size))
3306 return build_int_cst (sizetype, size);
3308 if (TREE_CODE (dest) == SSA_NAME)
3310 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3311 if (!is_gimple_assign (stmt))
3312 return NULL_TREE;
3314 dest = gimple_assign_rhs1 (stmt);
3316 tree_code code = gimple_assign_rhs_code (stmt);
3317 if (code == POINTER_PLUS_EXPR)
3319 /* compute_builtin_object_size fails for addresses with
3320 non-constant offsets. Try to determine the range of
3321 such an offset here and use it to adjust the constant
3322 size. */
3323 tree off = gimple_assign_rhs2 (stmt);
3324 if (TREE_CODE (off) == SSA_NAME
3325 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3327 wide_int min, max;
3328 enum value_range_type rng = get_range_info (off, &min, &max);
3330 if (rng == VR_RANGE)
3332 if (tree size = compute_objsize (dest, ostype))
3334 wide_int wisiz = wi::to_wide (size);
3336 /* Ignore negative offsets for now. For others,
3337 use the lower bound as the most optimistic
3338 estimate of the (remaining) size. */
3339 if (wi::sign_mask (min))
3340 ;
3341 else if (wi::ltu_p (min, wisiz))
3342 return wide_int_to_tree (TREE_TYPE (size),
3343 wi::sub (wisiz, min));
3344 else
3345 return size_zero_node;
3350 else if (code != ADDR_EXPR)
3351 return NULL_TREE;
3354 /* Unless computing the largest size (for memcpy and other raw memory
3355 functions), try to determine the size of the object from its type. */
3356 if (!ostype)
3357 return NULL_TREE;
3359 if (TREE_CODE (dest) != ADDR_EXPR)
3360 return NULL_TREE;
3362 tree type = TREE_TYPE (dest);
3363 if (TREE_CODE (type) == POINTER_TYPE)
3364 type = TREE_TYPE (type);
3366 type = TYPE_MAIN_VARIANT (type);
3368 if (TREE_CODE (type) == ARRAY_TYPE
3369 && !array_at_struct_end_p (dest))
3371 /* Return the constant size unless it's zero (that's a zero-length
3372 array likely at the end of a struct). */
3373 tree size = TYPE_SIZE_UNIT (type);
3374 if (size && TREE_CODE (size) == INTEGER_CST
3375 && !integer_zerop (size))
3376 return size;
3379 return NULL_TREE;
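/* Example (illustrative, assuming compute_builtin_object_size
   succeeds): for

     char buf[8];
     ... compute_objsize (&buf[2], 0) ...

   the result is 6, the number of bytes remaining from the offset to
   the end of the object.  */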
3382 /* Helper to determine and check the sizes of the source and the destination
3383 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3384 call expression, DEST is the destination argument, SRC is the source
3385 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3386 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3387 (no overflow or invalid sizes), false otherwise. */
3389 static bool
3390 check_memop_access (tree exp, tree dest, tree src, tree size)
3392 /* For functions like memset and memcpy that operate on raw memory
3393 try to determine the size of the largest source and destination
3394 object using type-0 Object Size regardless of the object size
3395 type specified by the option. */
3396 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3397 tree dstsize = compute_objsize (dest, 0);
3399 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3400 srcsize, dstsize);
3403 /* Validate memchr arguments without performing any expansion.
3404 Return NULL_RTX. */
3406 static rtx
3407 expand_builtin_memchr (tree exp, rtx)
3409 if (!validate_arglist (exp,
3410 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3411 return NULL_RTX;
3413 tree arg1 = CALL_EXPR_ARG (exp, 0);
3414 tree len = CALL_EXPR_ARG (exp, 2);
3416 /* Diagnose calls where the specified length exceeds the size
3417 of the object. */
3418 if (warn_stringop_overflow)
3420 tree size = compute_objsize (arg1, 0);
3421 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3422 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3425 return NULL_RTX;
3428 /* Expand a call EXP to the memcpy builtin.
3429 Return NULL_RTX if we failed; the caller should emit a normal call.
3430 Otherwise try to get the result in TARGET, if convenient (and in
3431 mode MODE if that's convenient). */
3433 static rtx
3434 expand_builtin_memcpy (tree exp, rtx target)
3436 if (!validate_arglist (exp,
3437 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3438 return NULL_RTX;
3440 tree dest = CALL_EXPR_ARG (exp, 0);
3441 tree src = CALL_EXPR_ARG (exp, 1);
3442 tree len = CALL_EXPR_ARG (exp, 2);
3444 check_memop_access (exp, dest, src, len);
3446 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3447 /*endp=*/ 0);
3450 /* Check a call EXP to the memmove built-in for validity.
3451 Return NULL_RTX on both success and failure. */
3453 static rtx
3454 expand_builtin_memmove (tree exp, rtx)
3456 if (!validate_arglist (exp,
3457 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3458 return NULL_RTX;
3460 tree dest = CALL_EXPR_ARG (exp, 0);
3461 tree src = CALL_EXPR_ARG (exp, 1);
3462 tree len = CALL_EXPR_ARG (exp, 2);
3464 check_memop_access (exp, dest, src, len);
3466 return NULL_RTX;
3469 /* Expand an instrumented call EXP to the memcpy builtin.
3470 Return NULL_RTX if we failed; the caller should emit a normal call.
3471 Otherwise try to get the result in TARGET, if convenient (and in
3472 mode MODE if that's convenient). */
3474 static rtx
3475 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3477 if (!validate_arglist (exp,
3478 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3479 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3480 INTEGER_TYPE, VOID_TYPE))
3481 return NULL_RTX;
3482 else
3484 tree dest = CALL_EXPR_ARG (exp, 0);
3485 tree src = CALL_EXPR_ARG (exp, 2);
3486 tree len = CALL_EXPR_ARG (exp, 4);
3487 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3488 /*endp=*/ 0);
3490 /* Return src bounds with the result. */
3491 if (res)
3493 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3494 expand_normal (CALL_EXPR_ARG (exp, 1)));
3495 res = chkp_join_splitted_slot (res, bnd);
3497 return res;
3501 /* Expand a call EXP to the mempcpy builtin.
3502 Return NULL_RTX if we failed; the caller should emit a normal call,
3503 otherwise try to get the result in TARGET, if convenient (and in
3504 mode MODE if that's convenient). If ENDP is 0 return the
3505 destination pointer, if ENDP is 1 return the end pointer ala
3506 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3507 stpcpy. */
3509 static rtx
3510 expand_builtin_mempcpy (tree exp, rtx target)
3512 if (!validate_arglist (exp,
3513 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3514 return NULL_RTX;
3516 tree dest = CALL_EXPR_ARG (exp, 0);
3517 tree src = CALL_EXPR_ARG (exp, 1);
3518 tree len = CALL_EXPR_ARG (exp, 2);
3520 /* Policy does not generally allow using compute_objsize (which
3521 is used internally by check_memop_access) to change code generation
3522 or drive optimization decisions.
3524 In this instance it is safe because the code we generate has
3525 the same semantics regardless of the return value of
3526 check_memop_access. Exactly the same amount of data is copied
3527 and the return value is exactly the same in both cases.
3529 Furthermore, check_memop_access always uses mode 0 for the call to
3530 compute_objsize, so the imprecise nature of compute_objsize is
3531 avoided. */
3533 /* Avoid expanding mempcpy into memcpy when the call is determined
3534 to overflow the buffer. This also prevents the same overflow
3535 from being diagnosed again when expanding memcpy. */
3536 if (!check_memop_access (exp, dest, src, len))
3537 return NULL_RTX;
3539 return expand_builtin_mempcpy_args (dest, src, len,
3540 target, exp, /*endp=*/ 1);
3543 /* Expand an instrumented call EXP to the mempcpy builtin.
3544 Return NULL_RTX if we failed; the caller should emit a normal call.
3545 Otherwise try to get the result in TARGET, if convenient (and in
3546 mode MODE if that's convenient). */
3548 static rtx
3549 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3553 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3554 INTEGER_TYPE, VOID_TYPE))
3555 return NULL_RTX;
3556 else
3558 tree dest = CALL_EXPR_ARG (exp, 0);
3559 tree src = CALL_EXPR_ARG (exp, 2);
3560 tree len = CALL_EXPR_ARG (exp, 4);
3561 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3562 exp, 1);
3564 /* Return src bounds with the result. */
3565 if (res)
3567 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3568 expand_normal (CALL_EXPR_ARG (exp, 1)));
3569 res = chkp_join_splitted_slot (res, bnd);
3571 return res;
3575 /* Helper function to do the actual work for expansion of the memory copy
3576 family of functions (memcpy, mempcpy, stpcpy). The expansion should copy
3577 LEN bytes of memory from SRC to DEST, assigning to TARGET if convenient.
3578 If ENDP is 0 return the
3579 destination pointer, if ENDP is 1 return the end pointer ala
3580 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3581 stpcpy. */
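/* As a concrete (hypothetical) illustration of ENDP: expanding
   mempcpy (d, s, 5) copies five bytes, and the value produced is
   d with ENDP == 0, d + 5 with ENDP == 1 (the mempcpy convention),
   and d + 4 with ENDP == 2 (the stpcpy convention: the address of
   the last byte written, which for stpcpy is the terminating nul).  */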
3583 static rtx
3584 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3585 rtx target, tree exp, int endp)
3587 const char *src_str;
3588 unsigned int src_align = get_pointer_alignment (src);
3589 unsigned int dest_align = get_pointer_alignment (dest);
3590 rtx dest_mem, src_mem, dest_addr, len_rtx;
3591 HOST_WIDE_INT expected_size = -1;
3592 unsigned int expected_align = 0;
3593 unsigned HOST_WIDE_INT min_size;
3594 unsigned HOST_WIDE_INT max_size;
3595 unsigned HOST_WIDE_INT probable_max_size;
3597 /* If DEST is not a pointer type, call the normal function. */
3598 if (dest_align == 0)
3599 return NULL_RTX;
3601 /* If SRC is not a pointer type, don't do this
3602 operation in-line. */
3603 if (src_align == 0)
3604 return NULL_RTX;
3606 if (currently_expanding_gimple_stmt)
3607 stringop_block_profile (currently_expanding_gimple_stmt,
3608 &expected_align, &expected_size);
3610 if (expected_align < dest_align)
3611 expected_align = dest_align;
3612 dest_mem = get_memory_rtx (dest, len);
3613 set_mem_align (dest_mem, dest_align);
3614 len_rtx = expand_normal (len);
3615 determine_block_size (len, len_rtx, &min_size, &max_size,
3616 &probable_max_size);
3617 src_str = c_getstr (src);
3619 /* If SRC is a string constant and block move would be done
3620 by pieces, we can avoid loading the string from memory
3621 and only store the computed constants. */
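/* (E.g. a hypothetical memcpy (d, "hi", 3) can become immediate
   stores of the constant bytes instead of a load-and-copy loop.)  */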
3622 if (src_str
3623 && CONST_INT_P (len_rtx)
3624 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3625 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3626 CONST_CAST (char *, src_str),
3627 dest_align, false))
3629 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3630 builtin_memcpy_read_str,
3631 CONST_CAST (char *, src_str),
3632 dest_align, false, endp);
3633 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3634 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3635 return dest_mem;
3638 src_mem = get_memory_rtx (src, len);
3639 set_mem_align (src_mem, src_align);
3641 /* Copy the block most expediently. */
3642 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3643 CALL_EXPR_TAILCALL (exp)
3644 && (endp == 0 || target == const0_rtx)
3645 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3646 expected_align, expected_size,
3647 min_size, max_size, probable_max_size);
3649 if (dest_addr == 0)
3651 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3652 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3655 if (endp && target != const0_rtx)
3657 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3658 /* For stpcpy, return a pointer to the last byte written. */
3659 if (endp == 2)
3660 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3663 return dest_addr;
3666 static rtx
3667 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3668 rtx target, tree orig_exp, int endp)
3670 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3671 endp);
3674 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3675 we failed, the caller should emit a normal call, otherwise try to
3676 get the result in TARGET, if convenient. If ENDP is 0 return the
3677 destination pointer, if ENDP is 1 return the end pointer ala
3678 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3679 stpcpy. */
3681 static rtx
3682 expand_movstr (tree dest, tree src, rtx target, int endp)
3684 struct expand_operand ops[3];
3685 rtx dest_mem;
3686 rtx src_mem;
3688 if (!targetm.have_movstr ())
3689 return NULL_RTX;
3691 dest_mem = get_memory_rtx (dest, NULL);
3692 src_mem = get_memory_rtx (src, NULL);
3693 if (!endp)
3695 target = force_reg (Pmode, XEXP (dest_mem, 0));
3696 dest_mem = replace_equiv_address (dest_mem, target);
3699 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3700 create_fixed_operand (&ops[1], dest_mem);
3701 create_fixed_operand (&ops[2], src_mem);
3702 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3703 return NULL_RTX;
3705 if (endp && target != const0_rtx)
3707 target = ops[0].value;
3708 /* movstr is supposed to set end to the address of the NUL
3709 terminator. If the caller requested a mempcpy-like return value,
3710 adjust it. */
3711 if (endp == 1)
3713 rtx tem = plus_constant (GET_MODE (target),
3714 gen_lowpart (GET_MODE (target), target), 1);
3715 emit_move_insn (target, force_operand (tem, NULL_RTX));
3718 return target;
3721 /* Do some very basic size validation of a call to the strcat builtin
3722 given by EXP. Return NULL_RTX to have the built-in expand to a call
3723 to the library function. */
3725 static rtx
3726 expand_builtin_strcat (tree exp, rtx)
3728 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3729 || !warn_stringop_overflow)
3730 return NULL_RTX;
3732 tree dest = CALL_EXPR_ARG (exp, 0);
3733 tree src = CALL_EXPR_ARG (exp, 1);
3735 /* There is no way here to determine the length of the string in
3736 the destination to which the SRC string is being appended, so
3737 just diagnose cases when the source string is longer than
3738 the destination object. */
3740 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3742 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3743 destsize);
3745 return NULL_RTX;
3748 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3749 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3750 try to get the result in TARGET, if convenient (and in mode MODE if that's
3751 convenient). */
3753 static rtx
3754 expand_builtin_strcpy (tree exp, rtx target)
3756 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3757 return NULL_RTX;
3759 tree dest = CALL_EXPR_ARG (exp, 0);
3760 tree src = CALL_EXPR_ARG (exp, 1);
3762 if (warn_stringop_overflow)
3764 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3765 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3766 src, destsize);
3769 return expand_builtin_strcpy_args (dest, src, target);
3772 /* Helper function to do the actual work for expand_builtin_strcpy. The
3773 arguments to the builtin_strcpy call DEST and SRC are broken out
3774 so that this can also be called without constructing an actual CALL_EXPR.
3775 The other arguments and return value are the same as for
3776 expand_builtin_strcpy. */
3778 static rtx
3779 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3781 return expand_movstr (dest, src, target, /*endp=*/0);
3784 /* Expand a call EXP to the stpcpy builtin.
3785 Return NULL_RTX if we failed; the caller should emit a normal call.
3786 Otherwise try to get the result in TARGET, if convenient (and in
3787 mode MODE if that's convenient). */
3789 static rtx
3790 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3792 tree dst, src;
3793 location_t loc = EXPR_LOCATION (exp);
3795 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3796 return NULL_RTX;
3798 dst = CALL_EXPR_ARG (exp, 0);
3799 src = CALL_EXPR_ARG (exp, 1);
3801 if (warn_stringop_overflow)
3803 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3804 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3805 src, destsize);
3808 /* If the return value is ignored, transform stpcpy into strcpy. */
3809 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3811 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3812 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3813 return expand_expr (result, target, mode, EXPAND_NORMAL);
3815 else
3817 tree len, lenp1;
3818 rtx ret;
3820 /* Ensure we get an actual string whose length can be evaluated at
3821 compile-time, not an expression containing a string. This is
3822 because the latter will potentially produce pessimized code
3823 when used to produce the return value. */
3824 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3825 return expand_movstr (dst, src, target, /*endp=*/2);
3827 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3828 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3829 target, exp, /*endp=*/2);
3831 if (ret)
3832 return ret;
3834 if (TREE_CODE (len) == INTEGER_CST)
3836 rtx len_rtx = expand_normal (len);
3838 if (CONST_INT_P (len_rtx))
3840 ret = expand_builtin_strcpy_args (dst, src, target);
3842 if (ret)
3844 if (! target)
3846 if (mode != VOIDmode)
3847 target = gen_reg_rtx (mode);
3848 else
3849 target = gen_reg_rtx (GET_MODE (ret));
3851 if (GET_MODE (target) != GET_MODE (ret))
3852 ret = gen_lowpart (GET_MODE (target), ret);
3854 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3855 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3856 gcc_assert (ret);
3858 return target;
3863 return expand_movstr (dst, src, target, /*endp=*/2);
3867 /* Check a call EXP to the stpncpy built-in for validity.
3868 Return NULL_RTX on both success and failure. */
3870 static rtx
3871 expand_builtin_stpncpy (tree exp, rtx)
3873 if (!validate_arglist (exp,
3874 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3875 || !warn_stringop_overflow)
3876 return NULL_RTX;
3878 /* The source and destination of the call. */
3879 tree dest = CALL_EXPR_ARG (exp, 0);
3880 tree src = CALL_EXPR_ARG (exp, 1);
3882 /* The exact number of bytes to write (not the maximum). */
3883 tree len = CALL_EXPR_ARG (exp, 2);
3885 /* The size of the destination object. */
3886 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3888 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3890 return NULL_RTX;
3893 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3894 bytes from constant string DATA + OFFSET and return it as target
3895 constant. */
3897 static rtx
3898 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3899 scalar_int_mode mode)
3901 const char *str = (const char *) data;
3903 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3904 return const0_rtx;
3906 return c_readstr (str + offset, mode);
3909 /* Helper to check the sizes of sequences and the destination of calls
3910 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3911 success (no overflow or invalid sizes), false otherwise. */
3913 static bool
3914 check_strncat_sizes (tree exp, tree objsize)
3916 tree dest = CALL_EXPR_ARG (exp, 0);
3917 tree src = CALL_EXPR_ARG (exp, 1);
3918 tree maxread = CALL_EXPR_ARG (exp, 2);
3920 /* Try to determine the range of lengths that the source expression
3921 refers to. */
3922 tree lenrange[2];
3923 get_range_strlen (src, lenrange);
3925 /* Try to verify that the destination is big enough for the shortest
3926 string. */
3928 if (!objsize && warn_stringop_overflow)
3930 /* If it hasn't been provided by __strncat_chk, try to determine
3931 the size of the destination object into which the source is
3932 being copied. */
3933 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3936 /* Add one for the terminating nul. */
3937 tree srclen = (lenrange[0]
3938 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3939 size_one_node)
3940 : NULL_TREE);
3942 /* The strncat function copies at most MAXREAD bytes and always appends
3943 the terminating nul so the specified upper bound should never be equal
3944 to (or greater than) the size of the destination. */
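/* E.g. the hypothetical

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed here: if the full bound of 8 bytes were copied, the
   terminating nul would be written past the end of D.  */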
3945 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3946 && tree_int_cst_equal (objsize, maxread))
3948 location_t loc = tree_nonartificial_location (exp);
3949 loc = expansion_point_location_if_in_system_header (loc);
3951 warning_at (loc, OPT_Wstringop_overflow_,
3952 "%K%qD specified bound %E equals destination size",
3953 exp, get_callee_fndecl (exp), maxread);
3955 return false;
3958 if (!srclen
3959 || (maxread && tree_fits_uhwi_p (maxread)
3960 && tree_fits_uhwi_p (srclen)
3961 && tree_int_cst_lt (maxread, srclen)))
3962 srclen = maxread;
3964 /* The number of bytes to write is bounded by MAXREAD, but check_access
3965 will also check SRCLEN if MAXREAD's value isn't known. */
3966 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3967 objsize);
3970 /* Similar to expand_builtin_strcat, do some very basic size validation
3971 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3972 the built-in expand to a call to the library function. */
3974 static rtx
3975 expand_builtin_strncat (tree exp, rtx)
3977 if (!validate_arglist (exp,
3978 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3979 || !warn_stringop_overflow)
3980 return NULL_RTX;
3982 tree dest = CALL_EXPR_ARG (exp, 0);
3983 tree src = CALL_EXPR_ARG (exp, 1);
3984 /* The upper bound on the number of bytes to write. */
3985 tree maxread = CALL_EXPR_ARG (exp, 2);
3986 /* The length of the source sequence. */
3987 tree slen = c_strlen (src, 1);
3989 /* Try to determine the range of lengths that the source expression
3990 refers to. */
3991 tree lenrange[2];
3992 if (slen)
3993 lenrange[0] = lenrange[1] = slen;
3994 else
3995 get_range_strlen (src, lenrange);
3997 /* Try to verify that the destination is big enough for the shortest
3998 string. First try to determine the size of the destination object
3999 into which the source is being copied. */
4000 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4002 /* Add one for the terminating nul. */
4003 tree srclen = (lenrange[0]
4004 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4005 size_one_node)
4006 : NULL_TREE);
4008 /* The strncat function copies at most MAXREAD bytes and always appends
4009 the terminating nul so the specified upper bound should never be equal
4010 to (or greater than) the size of the destination. */
4011 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4012 && tree_int_cst_equal (destsize, maxread))
4014 location_t loc = tree_nonartificial_location (exp);
4015 loc = expansion_point_location_if_in_system_header (loc);
4017 warning_at (loc, OPT_Wstringop_overflow_,
4018 "%K%qD specified bound %E equals destination size",
4019 exp, get_callee_fndecl (exp), maxread);
4021 return NULL_RTX;
4024 if (!srclen
4025 || (maxread && tree_fits_uhwi_p (maxread)
4026 && tree_fits_uhwi_p (srclen)
4027 && tree_int_cst_lt (maxread, srclen)))
4028 srclen = maxread;
4030 /* The number of bytes to write is SRCLEN. */
4031 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4033 return NULL_RTX;
4036 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4037 NULL_RTX if we failed; the caller should emit a normal call. */
4039 static rtx
4040 expand_builtin_strncpy (tree exp, rtx target)
4042 location_t loc = EXPR_LOCATION (exp);
4044 if (validate_arglist (exp,
4045 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4047 tree dest = CALL_EXPR_ARG (exp, 0);
4048 tree src = CALL_EXPR_ARG (exp, 1);
4049 /* The number of bytes to write (not the maximum). */
4050 tree len = CALL_EXPR_ARG (exp, 2);
4051 /* The length of the source sequence. */
4052 tree slen = c_strlen (src, 1);
4054 if (warn_stringop_overflow)
4056 tree destsize = compute_objsize (dest,
4057 warn_stringop_overflow - 1);
4059 /* The number of bytes to write is LEN but check_access will also
4060 check SLEN if LEN's value isn't known. */
4061 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4062 destsize);
4065 /* We must be passed constant LEN and SRC parameters. */
4066 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4067 return NULL_RTX;
4069 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4071 /* We're required to pad with trailing zeros if the requested
4072 len is greater than strlen(s2)+1. In that case try to
4073 use store_by_pieces; if that fails, punt. */
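/* E.g. strncpy (d, "ab", 5) must write 'a', 'b' and three nul bytes.  */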
4074 if (tree_int_cst_lt (slen, len))
4076 unsigned int dest_align = get_pointer_alignment (dest);
4077 const char *p = c_getstr (src);
4078 rtx dest_mem;
4080 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4081 || !can_store_by_pieces (tree_to_uhwi (len),
4082 builtin_strncpy_read_str,
4083 CONST_CAST (char *, p),
4084 dest_align, false))
4085 return NULL_RTX;
4087 dest_mem = get_memory_rtx (dest, len);
4088 store_by_pieces (dest_mem, tree_to_uhwi (len),
4089 builtin_strncpy_read_str,
4090 CONST_CAST (char *, p), dest_align, false, 0);
4091 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4092 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4093 return dest_mem;
4096 return NULL_RTX;
4099 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4100 bytes from constant string DATA + OFFSET and return it as target
4101 constant. */
4103 static rtx
4104 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4105 scalar_int_mode mode)
4107 const char *c = (const char *) data;
4108 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4110 memset (p, *c, GET_MODE_SIZE (mode));
4112 return c_readstr (p, mode);
4115 /* Callback routine for store_by_pieces. Return the RTL of a register
4116 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4117 char value given in the RTL register data. For example, if mode is
4118 4 bytes wide, return the RTL for 0x01010101*data. */
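/* (E.g. for the byte value 0xAB in a 4-byte mode this yields
   0xAB * 0x01010101 == 0xABABABAB in a register.)  */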
4120 static rtx
4121 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4122 scalar_int_mode mode)
4124 rtx target, coeff;
4125 size_t size;
4126 char *p;
4128 size = GET_MODE_SIZE (mode);
4129 if (size == 1)
4130 return (rtx) data;
4132 p = XALLOCAVEC (char, size);
4133 memset (p, 1, size);
4134 coeff = c_readstr (p, mode);
4136 target = convert_to_mode (mode, (rtx) data, 1);
4137 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4138 return force_reg (mode, target);
4141 /* Expand expression EXP, which is a call to the memset builtin. Return
4142 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4143 try to get the result in TARGET, if convenient (and in mode MODE if that's
4144 convenient). */
4146 static rtx
4147 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4149 if (!validate_arglist (exp,
4150 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4151 return NULL_RTX;
4153 tree dest = CALL_EXPR_ARG (exp, 0);
4154 tree val = CALL_EXPR_ARG (exp, 1);
4155 tree len = CALL_EXPR_ARG (exp, 2);
4157 check_memop_access (exp, dest, NULL_TREE, len);
4159 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4162 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4163 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4164 try to get the result in TARGET, if convenient (and in mode MODE if that's
4165 convenient). */
4167 static rtx
4168 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4170 if (!validate_arglist (exp,
4171 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4172 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4173 return NULL_RTX;
4174 else
4176 tree dest = CALL_EXPR_ARG (exp, 0);
4177 tree val = CALL_EXPR_ARG (exp, 2);
4178 tree len = CALL_EXPR_ARG (exp, 3);
4179 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4181 /* Return src bounds with the result. */
4182 if (res)
4184 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4185 expand_normal (CALL_EXPR_ARG (exp, 1)));
4186 res = chkp_join_splitted_slot (res, bnd);
4188 return res;
4192 /* Helper function to do the actual work for expand_builtin_memset. The
4193 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4194 so that this can also be called without constructing an actual CALL_EXPR.
4195 The other arguments and return value are the same as for
4196 expand_builtin_memset. */
4198 static rtx
4199 expand_builtin_memset_args (tree dest, tree val, tree len,
4200 rtx target, machine_mode mode, tree orig_exp)
4202 tree fndecl, fn;
4203 enum built_in_function fcode;
4204 machine_mode val_mode;
4205 char c;
4206 unsigned int dest_align;
4207 rtx dest_mem, dest_addr, len_rtx;
4208 HOST_WIDE_INT expected_size = -1;
4209 unsigned int expected_align = 0;
4210 unsigned HOST_WIDE_INT min_size;
4211 unsigned HOST_WIDE_INT max_size;
4212 unsigned HOST_WIDE_INT probable_max_size;
4214 dest_align = get_pointer_alignment (dest);
4216 /* If DEST is not a pointer type, don't do this operation in-line. */
4217 if (dest_align == 0)
4218 return NULL_RTX;
4220 if (currently_expanding_gimple_stmt)
4221 stringop_block_profile (currently_expanding_gimple_stmt,
4222 &expected_align, &expected_size);
4224 if (expected_align < dest_align)
4225 expected_align = dest_align;
4227 /* If the LEN parameter is zero, return DEST. */
4228 if (integer_zerop (len))
4230 /* Evaluate and ignore VAL in case it has side-effects. */
4231 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4232 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4235 /* Stabilize the arguments in case we fail. */
4236 dest = builtin_save_expr (dest);
4237 val = builtin_save_expr (val);
4238 len = builtin_save_expr (len);
4240 len_rtx = expand_normal (len);
4241 determine_block_size (len, len_rtx, &min_size, &max_size,
4242 &probable_max_size);
4243 dest_mem = get_memory_rtx (dest, len);
4244 val_mode = TYPE_MODE (unsigned_char_type_node);
4246 if (TREE_CODE (val) != INTEGER_CST)
4248 rtx val_rtx;
4250 val_rtx = expand_normal (val);
4251 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4253 /* Assume that we can memset by pieces if we can store
4254 the coefficients by pieces (in the required modes).
4255 We can't pass builtin_memset_gen_str as that emits RTL. */
4256 c = 1;
4257 if (tree_fits_uhwi_p (len)
4258 && can_store_by_pieces (tree_to_uhwi (len),
4259 builtin_memset_read_str, &c, dest_align,
4260 true))
4262 val_rtx = force_reg (val_mode, val_rtx);
4263 store_by_pieces (dest_mem, tree_to_uhwi (len),
4264 builtin_memset_gen_str, val_rtx, dest_align,
4265 true, 0);
4267 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4268 dest_align, expected_align,
4269 expected_size, min_size, max_size,
4270 probable_max_size))
4271 goto do_libcall;
4273 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4274 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4275 return dest_mem;
4278 if (target_char_cast (val, &c))
4279 goto do_libcall;
4281 if (c)
4283 if (tree_fits_uhwi_p (len)
4284 && can_store_by_pieces (tree_to_uhwi (len),
4285 builtin_memset_read_str, &c, dest_align,
4286 true))
4287 store_by_pieces (dest_mem, tree_to_uhwi (len),
4288 builtin_memset_read_str, &c, dest_align, true, 0);
4289 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4290 gen_int_mode (c, val_mode),
4291 dest_align, expected_align,
4292 expected_size, min_size, max_size,
4293 probable_max_size))
4294 goto do_libcall;
4296 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4297 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4298 return dest_mem;
4301 set_mem_align (dest_mem, dest_align);
4302 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4303 CALL_EXPR_TAILCALL (orig_exp)
4304 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4305 expected_align, expected_size,
4306 min_size, max_size,
4307 probable_max_size);
4309 if (dest_addr == 0)
4311 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4312 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4315 return dest_addr;
4317 do_libcall:
4318 fndecl = get_callee_fndecl (orig_exp);
4319 fcode = DECL_FUNCTION_CODE (fndecl);
4320 if (fcode == BUILT_IN_MEMSET
4321 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4322 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4323 dest, val, len);
4324 else if (fcode == BUILT_IN_BZERO)
4325 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4326 dest, len);
4327 else
4328 gcc_unreachable ();
4329 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4330 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4331 return expand_call (fn, target, target == const0_rtx);
4334 /* Expand expression EXP, which is a call to the bzero builtin. Return
4335 NULL_RTX if we failed; the caller should emit a normal call. */
4337 static rtx
4338 expand_builtin_bzero (tree exp)
4340 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4341 return NULL_RTX;
4343 tree dest = CALL_EXPR_ARG (exp, 0);
4344 tree size = CALL_EXPR_ARG (exp, 1);
4346 check_memop_access (exp, dest, NULL_TREE, size);
4348 /* New argument list transforming bzero(ptr x, int y) to
4349 memset(ptr x, int 0, size_t y). This is done this way
4350 so that if it isn't expanded inline, we fall back to
4351 calling bzero instead of memset. */
4353 location_t loc = EXPR_LOCATION (exp);
4355 return expand_builtin_memset_args (dest, integer_zero_node,
4356 fold_convert_loc (loc,
4357 size_type_node, size),
4358 const0_rtx, VOIDmode, exp);
4361 /* Try to expand cmpstr operation ICODE with the given operands.
4362 Return the result rtx on success, otherwise return null. */
4364 static rtx
4365 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4366 HOST_WIDE_INT align)
4368 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4370 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4371 target = NULL_RTX;
4373 struct expand_operand ops[4];
4374 create_output_operand (&ops[0], target, insn_mode);
4375 create_fixed_operand (&ops[1], arg1_rtx);
4376 create_fixed_operand (&ops[2], arg2_rtx);
4377 create_integer_operand (&ops[3], align);
4378 if (maybe_expand_insn (icode, 4, ops))
4379 return ops[0].value;
4380 return NULL_RTX;
4383 /* Expand expression EXP, which is a call to the memcmp built-in function.
4384 Return NULL_RTX if we failed and the caller should emit a normal call,
4385 otherwise try to get the result in TARGET, if convenient.
4386 RESULT_EQ is true if we can relax the returned value to be either zero
4387 or nonzero, without caring about the sign. */
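/* (RESULT_EQ typically comes from uses such as the hypothetical
   if (memcmp (a, b, n) == 0), where only equality with zero matters,
   allowing a cheaper equality-only block comparison.)  */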
4389 static rtx
4390 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4392 if (!validate_arglist (exp,
4393 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4394 return NULL_RTX;
4396 tree arg1 = CALL_EXPR_ARG (exp, 0);
4397 tree arg2 = CALL_EXPR_ARG (exp, 1);
4398 tree len = CALL_EXPR_ARG (exp, 2);
4400 /* Diagnose calls where the specified length exceeds the size of either
4401 object. */
4402 if (warn_stringop_overflow)
4404 tree size = compute_objsize (arg1, 0);
4405 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4406 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4408 size = compute_objsize (arg2, 0);
4409 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4410 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4414 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4415 location_t loc = EXPR_LOCATION (exp);
4417 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4418 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4420 /* If either argument is not a pointer, fall back to a library call. */
4421 if (arg1_align == 0 || arg2_align == 0)
4422 return NULL_RTX;
4424 rtx arg1_rtx = get_memory_rtx (arg1, len);
4425 rtx arg2_rtx = get_memory_rtx (arg2, len);
4426 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4428 /* Set MEM_SIZE as appropriate. */
4429 if (CONST_INT_P (len_rtx))
4431 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4432 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4435 by_pieces_constfn constfn = NULL;
4437 const char *src_str = c_getstr (arg2);
4438 if (result_eq && src_str == NULL)
4440 src_str = c_getstr (arg1);
4441 if (src_str != NULL)
4442 std::swap (arg1_rtx, arg2_rtx);
4445 /* If SRC is a string constant and the block comparison would be
4446 done by pieces, we can avoid loading the string from memory
4447 and use the computed constants directly. */
4448 if (src_str
4449 && CONST_INT_P (len_rtx)
4450 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4451 constfn = builtin_memcpy_read_str;
4453 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4454 TREE_TYPE (len), target,
4455 result_eq, constfn,
4456 CONST_CAST (char *, src_str));
4458 if (result)
4460 /* Return the value in the proper mode for this function. */
4461 if (GET_MODE (result) == mode)
4462 return result;
4464 if (target != 0)
4466 convert_move (target, result, 0);
4467 return target;
4470 return convert_to_mode (mode, result, 0);
4473 return NULL_RTX;
4476 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4477 if we failed; the caller should emit a normal call. Otherwise try to get
4478 the result in TARGET, if convenient. */
4480 static rtx
4481 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4483 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4484 return NULL_RTX;
4486 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4487 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4488 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4489 return NULL_RTX;
4491 tree arg1 = CALL_EXPR_ARG (exp, 0);
4492 tree arg2 = CALL_EXPR_ARG (exp, 1);
4494 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4495 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4497 /* If either argument is not a pointer, fall back to a library call. */
4498 if (arg1_align == 0 || arg2_align == 0)
4499 return NULL_RTX;
4501 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4502 arg1 = builtin_save_expr (arg1);
4503 arg2 = builtin_save_expr (arg2);
4505 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4506 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4508 rtx result = NULL_RTX;
4509 /* Try to call cmpstrsi. */
4510 if (cmpstr_icode != CODE_FOR_nothing)
4511 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4512 MIN (arg1_align, arg2_align));
4514 /* Try to determine at least one length and call cmpstrnsi. */
4515 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4517 tree len;
4518 rtx arg3_rtx;
4520 tree len1 = c_strlen (arg1, 1);
4521 tree len2 = c_strlen (arg2, 1);
4523 if (len1)
4524 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4525 if (len2)
4526 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4528 /* If we don't have a constant length for the first, use the length
4529 of the second, if we know it. We don't require a constant for
4530 this case; some cost analysis could be done if both are available
4531 but neither is constant. For now, assume they're equally cheap,
4532 unless one has side effects. If both strings have constant lengths,
4533 use the smaller. */
4535 if (!len1)
4536 len = len2;
4537 else if (!len2)
4538 len = len1;
4539 else if (TREE_SIDE_EFFECTS (len1))
4540 len = len2;
4541 else if (TREE_SIDE_EFFECTS (len2))
4542 len = len1;
4543 else if (TREE_CODE (len1) != INTEGER_CST)
4544 len = len2;
4545 else if (TREE_CODE (len2) != INTEGER_CST)
4546 len = len1;
4547 else if (tree_int_cst_lt (len1, len2))
4548 len = len1;
4549 else
4550 len = len2;
4552 /* If both arguments have side effects, we cannot optimize. */
4553 if (len && !TREE_SIDE_EFFECTS (len))
4555 arg3_rtx = expand_normal (len);
4556 result = expand_cmpstrn_or_cmpmem
4557 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4558 arg3_rtx, MIN (arg1_align, arg2_align));
4562 /* Check to see if the argument was declared attribute nonstring
4563 and if so, issue a warning since at this point it's not known
4564 to be nul-terminated. */
4565 tree fndecl = get_callee_fndecl (exp);
4566 maybe_warn_nonstring_arg (fndecl, exp);
4568 if (result)
4570 /* Return the value in the proper mode for this function. */
4571 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4572 if (GET_MODE (result) == mode)
4573 return result;
4574 if (target == 0)
4575 return convert_to_mode (mode, result, 0);
4576 convert_move (target, result, 0);
4577 return target;
4580 /* Expand the library call ourselves using a stabilized argument
4581 list to avoid re-evaluating the function's arguments twice. */
4582 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4583 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4584 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4585 return expand_call (fn, target, target == const0_rtx);
4588 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4589 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4590 the result in TARGET, if convenient. */
4592 static rtx
4593 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4594 ATTRIBUTE_UNUSED machine_mode mode)
4596 if (!validate_arglist (exp,
4597 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4598 return NULL_RTX;
4600 /* If c_strlen can determine an expression for one of the string
4601 lengths, and it doesn't have side effects, then emit cmpstrnsi
4602 using length MIN(strlen(string)+1, arg3). */
4603 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4604 if (cmpstrn_icode == CODE_FOR_nothing)
4605 return NULL_RTX;
4607 tree len;
4609 tree arg1 = CALL_EXPR_ARG (exp, 0);
4610 tree arg2 = CALL_EXPR_ARG (exp, 1);
4611 tree arg3 = CALL_EXPR_ARG (exp, 2);
4613 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4614 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4616 tree len1 = c_strlen (arg1, 1);
4617 tree len2 = c_strlen (arg2, 1);
4619 location_t loc = EXPR_LOCATION (exp);
4621 if (len1)
4622 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4623 if (len2)
4624 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4626 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4628 /* If we don't have a constant length for the first, use the length
4629 of the second, if we know it. If neither string is constant length,
4630 use the given length argument. We don't require a constant for
4631 this case; some cost analysis could be done if both are available
4632 but neither is constant. For now, assume they're equally cheap,
4633 unless one has side effects. If both strings have constant lengths,
4634 use the smaller. */
4636 if (!len1 && !len2)
4637 len = len3;
4638 else if (!len1)
4639 len = len2;
4640 else if (!len2)
4641 len = len1;
4642 else if (TREE_SIDE_EFFECTS (len1))
4643 len = len2;
4644 else if (TREE_SIDE_EFFECTS (len2))
4645 len = len1;
4646 else if (TREE_CODE (len1) != INTEGER_CST)
4647 len = len2;
4648 else if (TREE_CODE (len2) != INTEGER_CST)
4649 len = len1;
4650 else if (tree_int_cst_lt (len1, len2))
4651 len = len1;
4652 else
4653 len = len2;
4655 /* If we are not using the given length, we must incorporate it here.
4656 The actual new length parameter will be MIN(len,arg3) in this case. */
4657 if (len != len3)
4658 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4659 rtx arg1_rtx = get_memory_rtx (arg1, len);
4660 rtx arg2_rtx = get_memory_rtx (arg2, len);
4661 rtx arg3_rtx = expand_normal (len);
4662 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4663 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4664 MIN (arg1_align, arg2_align));
4666 /* Check to see if the argument was declared attribute nonstring
4667 and if so, issue a warning since at this point it's not known
4668 to be nul-terminated. */
4669 tree fndecl = get_callee_fndecl (exp);
4670 maybe_warn_nonstring_arg (fndecl, exp);
4672 if (result)
4674 /* Return the value in the proper mode for this function. */
4675 mode = TYPE_MODE (TREE_TYPE (exp));
4676 if (GET_MODE (result) == mode)
4677 return result;
4678 if (target == 0)
4679 return convert_to_mode (mode, result, 0);
4680 convert_move (target, result, 0);
4681 return target;
4684 /* Expand the library call ourselves using a stabilized argument
4685 list to avoid re-evaluating the function's arguments twice. */
4686 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4687 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4688 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4689 return expand_call (fn, target, target == const0_rtx);
4692 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4693 if that's convenient. */
4695 rtx
4696 expand_builtin_saveregs (void)
4698 rtx val;
4699 rtx_insn *seq;
4701 /* Don't do __builtin_saveregs more than once in a function.
4702 Save the result of the first call and reuse it. */
4703 if (saveregs_value != 0)
4704 return saveregs_value;
4706 /* When this function is called, it means that registers must be
4707 saved on entry to this function. So we migrate the call to the
4708 first insn of this function. */
4710 start_sequence ();
4712 /* Do whatever the machine needs done in this case. */
4713 val = targetm.calls.expand_builtin_saveregs ();
4715 seq = get_insns ();
4716 end_sequence ();
4718 saveregs_value = val;
4720 /* Put the insns after the NOTE that starts the function. If this
4721 is inside a start_sequence, make the outer-level insn chain current, so
4722 the code is placed at the start of the function. */
4723 push_topmost_sequence ();
4724 emit_insn_after (seq, entry_of_function ());
4725 pop_topmost_sequence ();
4727 return val;
4730 /* Expand a call to __builtin_next_arg. */
4732 static rtx
4733 expand_builtin_next_arg (void)
4735 /* Checking arguments is already done in fold_builtin_next_arg,
4736 which must be called before this function. */
4737 return expand_binop (ptr_mode, add_optab,
4738 crtl->args.internal_arg_pointer,
4739 crtl->args.arg_offset_rtx,
4740 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4743 /* Make it easier for the backends by protecting the valist argument
4744 from multiple evaluations. */
4746 static tree
4747 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4749 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4751 /* The current way of determining the type of valist is completely
4752 bogus. We should have the information on the va builtin instead. */
4753 if (!vatype)
4754 vatype = targetm.fn_abi_va_list (cfun->decl);
4756 if (TREE_CODE (vatype) == ARRAY_TYPE)
4758 if (TREE_SIDE_EFFECTS (valist))
4759 valist = save_expr (valist);
4761 /* For this case, the backends will be expecting a pointer to
4762 vatype, but it's possible we've actually been given an array
4763 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4764 So fix it. */
4765 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4767 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4768 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4771 else
4773 tree pt = build_pointer_type (vatype);
4775 if (! needs_lvalue)
4777 if (! TREE_SIDE_EFFECTS (valist))
4778 return valist;
4780 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4781 TREE_SIDE_EFFECTS (valist) = 1;
4784 if (TREE_SIDE_EFFECTS (valist))
4785 valist = save_expr (valist);
4786 valist = fold_build2_loc (loc, MEM_REF,
4787 vatype, valist, build_int_cst (pt, 0));
4790 return valist;
4793 /* The "standard" definition of va_list is void*. */
4795 tree
4796 std_build_builtin_va_list (void)
4798 return ptr_type_node;
4801 /* The "standard" abi va_list is va_list_type_node. */
4803 tree
4804 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4806 return va_list_type_node;
4809 /* The "standard" type of va_list is va_list_type_node. */
4811 tree
4812 std_canonical_va_list_type (tree type)
4814 tree wtype, htype;
4816 wtype = va_list_type_node;
4817 htype = type;
4819 if (TREE_CODE (wtype) == ARRAY_TYPE)
4821 /* If va_list is an array type, the argument may have decayed
4822 to a pointer type, e.g. by being passed to another function.
4823 In that case, unwrap both types so that we can compare the
4824 underlying records. */
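/* (E.g. on x86_64 va_list is the array type struct __va_list_tag[1],
   so a va_list passed to another function arrives as the decayed
   pointer type struct __va_list_tag *.)  */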
4825 if (TREE_CODE (htype) == ARRAY_TYPE
4826 || POINTER_TYPE_P (htype))
4828 wtype = TREE_TYPE (wtype);
4829 htype = TREE_TYPE (htype);
4832 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4833 return va_list_type_node;
4835 return NULL_TREE;
4838 /* The "standard" implementation of va_start: just assign `nextarg' to
4839 the variable. */
4841 void
4842 std_expand_builtin_va_start (tree valist, rtx nextarg)
4844 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4845 convert_move (va_r, nextarg, 0);
4847 /* We do not have any valid bounds for the pointer, so
4848 just store zero bounds for it. */
4849 if (chkp_function_instrumented_p (current_function_decl))
4850 chkp_expand_bounds_reset_for_mem (valist,
4851 make_tree (TREE_TYPE (valist),
4852 nextarg));
4855 /* Expand EXP, a call to __builtin_va_start. */
4857 static rtx
4858 expand_builtin_va_start (tree exp)
4860 rtx nextarg;
4861 tree valist;
4862 location_t loc = EXPR_LOCATION (exp);
4864 if (call_expr_nargs (exp) < 2)
4866 error_at (loc, "too few arguments to function %<va_start%>");
4867 return const0_rtx;
4870 if (fold_builtin_next_arg (exp, true))
4871 return const0_rtx;
4873 nextarg = expand_builtin_next_arg ();
4874 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4876 if (targetm.expand_builtin_va_start)
4877 targetm.expand_builtin_va_start (valist, nextarg);
4878 else
4879 std_expand_builtin_va_start (valist, nextarg);
4881 return const0_rtx;
4884 /* Expand EXP, a call to __builtin_va_end. */
4886 static rtx
4887 expand_builtin_va_end (tree exp)
4889 tree valist = CALL_EXPR_ARG (exp, 0);
4891 /* Evaluate for side effects, if needed. I hate macros that don't
4892 do that. */
4893 if (TREE_SIDE_EFFECTS (valist))
4894 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4896 return const0_rtx;
4899 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4900 builtin rather than just as an assignment in stdarg.h because of the
4901 nastiness of array-type va_list types. */
4903 static rtx
4904 expand_builtin_va_copy (tree exp)
4906 tree dst, src, t;
4907 location_t loc = EXPR_LOCATION (exp);
4909 dst = CALL_EXPR_ARG (exp, 0);
4910 src = CALL_EXPR_ARG (exp, 1);
4912 dst = stabilize_va_list_loc (loc, dst, 1);
4913 src = stabilize_va_list_loc (loc, src, 0);
4915 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4917 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4919 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4920 TREE_SIDE_EFFECTS (t) = 1;
4921 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4923 else
4925 rtx dstb, srcb, size;
4927 /* Evaluate to pointers. */
4928 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4929 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4930 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4931 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4933 dstb = convert_memory_address (Pmode, dstb);
4934 srcb = convert_memory_address (Pmode, srcb);
4936 /* "Dereference" to BLKmode memories. */
4937 dstb = gen_rtx_MEM (BLKmode, dstb);
4938 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4939 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4940 srcb = gen_rtx_MEM (BLKmode, srcb);
4941 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4942 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4944 /* Copy. */
4945 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4948 return const0_rtx;
4951 /* Expand a call to one of the builtin functions __builtin_frame_address or
4952 __builtin_return_address. */
4954 static rtx
4955 expand_builtin_frame_address (tree fndecl, tree exp)
4957 /* The argument must be a nonnegative integer constant.
4958 It counts the number of frames to scan up the stack.
4959 The value is either the frame pointer value or the return
4960 address saved in that frame. */
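/* (E.g. __builtin_return_address (0) yields the current function's
   return address; a nonzero count walks up the stack and is
   diagnosed below with -Wframe-address as potentially unsafe.)  */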
4961 if (call_expr_nargs (exp) == 0)
4962 /* Warning about missing arg was already issued. */
4963 return const0_rtx;
4964 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4966 error ("invalid argument to %qD", fndecl);
4967 return const0_rtx;
4969 else
4971 /* Number of frames to scan up the stack. */
4972 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4974 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4976 /* Some ports cannot access arbitrary stack frames. */
4977 if (tem == NULL)
4979 warning (0, "unsupported argument to %qD", fndecl);
4980 return const0_rtx;
4983 if (count)
4985 /* Warn since no effort is made to ensure that any frame
4986 beyond the current one exists or can be safely reached. */
4987 warning (OPT_Wframe_address, "calling %qD with "
4988 "a nonzero argument is unsafe", fndecl);
4991 /* For __builtin_frame_address, return what we've got. */
4992 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4993 return tem;
4995 if (!REG_P (tem)
4996 && ! CONSTANT_P (tem))
4997 tem = copy_addr_to_reg (tem);
4998 return tem;
5002 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5003 failed and the caller should emit a normal call. */
5005 static rtx
5006 expand_builtin_alloca (tree exp)
5008 rtx op0;
5009 rtx result;
5010 unsigned int align;
5011 tree fndecl = get_callee_fndecl (exp);
5012 HOST_WIDE_INT max_size;
5013 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5014 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5015 bool valid_arglist
5016 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5017 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5018 VOID_TYPE)
5019 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5020 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5021 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5023 if (!valid_arglist)
5024 return NULL_RTX;
5026 if ((alloca_for_var && !warn_vla_limit)
5027 || (!alloca_for_var && !warn_alloca_limit))
5029 /* -Walloca-larger-than and -Wvla-larger-than settings override
5030 the more general -Walloc-size-larger-than, so unless either of
5031 the former options is specified, check the alloca arguments for
5032 overflow. */
5033 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5034 int idx[] = { 0, -1 };
5035 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5038 /* Compute the argument. */
5039 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5041 /* Compute the alignment. */
5042 align = (fcode == BUILT_IN_ALLOCA
5043 ? BIGGEST_ALIGNMENT
5044 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5046 /* Compute the maximum size. */
5047 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5048 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5049 : -1);
5051 /* Allocate the desired space. If the allocation stems from the declaration
5052 of a variable-sized object, it cannot accumulate. */
5053 result
5054 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5055 result = convert_memory_address (ptr_mode, result);
5057 return result;
5060 /* Emit a call to __asan_allocas_unpoison for EXP. Replace the second argument
5061 of the call with virtual_stack_dynamic_rtx, because in the asan pass we emit a
5062 dummy value for the second parameter, relying on this function to perform the
5063 change. See the motivation in the comment for the handle_builtin_stack_restore
5064 function. */
5066 static rtx
5067 expand_asan_emit_allocas_unpoison (tree exp)
5069 tree arg0 = CALL_EXPR_ARG (exp, 0);
5070 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5071 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5072 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5073 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5074 top, ptr_mode, bot, ptr_mode);
5075 return ret;
5078 /* Expand a call to bswap builtin in EXP.
5079 Return NULL_RTX if a normal call should be emitted rather than expanding the
5080 function in-line. If convenient, the result should be placed in TARGET.
5081 SUBTARGET may be used as the target for computing one of EXP's operands. */
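/* (E.g. __builtin_bswap32 (0x12345678) reverses the byte order and
   yields 0x78563412.)  */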
5083 static rtx
5084 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5085 rtx subtarget)
5087 tree arg;
5088 rtx op0;
5090 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5091 return NULL_RTX;
5093 arg = CALL_EXPR_ARG (exp, 0);
5094 op0 = expand_expr (arg,
5095 subtarget && GET_MODE (subtarget) == target_mode
5096 ? subtarget : NULL_RTX,
5097 target_mode, EXPAND_NORMAL);
5098 if (GET_MODE (op0) != target_mode)
5099 op0 = convert_to_mode (target_mode, op0, 1);
5101 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5103 gcc_assert (target);
5105 return convert_to_mode (target_mode, target, 1);
5108 /* Expand a call to a unary builtin in EXP.
5109 Return NULL_RTX if a normal call should be emitted rather than expanding the
5110 function in-line. If convenient, the result should be placed in TARGET.
5111 SUBTARGET may be used as the target for computing one of EXP's operands. */
5113 static rtx
5114 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5115 rtx subtarget, optab op_optab)
5117 rtx op0;
5119 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5120 return NULL_RTX;
5122 /* Compute the argument. */
5123 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5124 (subtarget
5125 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5126 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5127 VOIDmode, EXPAND_NORMAL);
5128 /* Compute op, into TARGET if possible.
5129 Set TARGET to wherever the result comes back. */
5130 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5131 op_optab, op0, target, op_optab != clrsb_optab);
5132 gcc_assert (target);
5134 return convert_to_mode (target_mode, target, 0);
5137 /* Expand a call to __builtin_expect. We just return our argument
5138 as the builtin_expect semantic should've been already executed by
5139 the tree branch prediction pass. */
5141 static rtx
5142 expand_builtin_expect (tree exp, rtx target)
5144 tree arg;
5146 if (call_expr_nargs (exp) < 2)
5147 return const0_rtx;
5148 arg = CALL_EXPR_ARG (exp, 0);
5150 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5151 /* When guessing was done, the hints should be already stripped away. */
5152 gcc_assert (!flag_guess_branch_prob
5153 || optimize == 0 || seen_error ());
5154 return target;
5157 /* Expand a call to __builtin_assume_aligned. We just return our first
5158 argument as the builtin_assume_aligned semantic should've been already
5159 executed by CCP. */
5161 static rtx
5162 expand_builtin_assume_aligned (tree exp, rtx target)
5164 if (call_expr_nargs (exp) < 2)
5165 return const0_rtx;
5166 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5167 EXPAND_NORMAL);
5168 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5169 && (call_expr_nargs (exp) < 3
5170 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5171 return target;
5174 void
5175 expand_builtin_trap (void)
5177 if (targetm.have_trap ())
5179 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5180 /* For trap insns, when not accumulating outgoing args, force a
5181 REG_ARGS_SIZE note to prevent crossjumping of calls with
5182 different args sizes. */
5183 if (!ACCUMULATE_OUTGOING_ARGS)
5184 add_args_size_note (insn, stack_pointer_delta);
5186 else
5188 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5189 tree call_expr = build_call_expr (fn, 0);
5190 expand_call (call_expr, NULL_RTX, false);
5193 emit_barrier ();
5196 /* Expand a call to __builtin_unreachable. We do nothing except emit
5197 a barrier saying that control flow will not pass here.
5199 It is the responsibility of the program being compiled to ensure
5200 that control flow never reaches __builtin_unreachable. */
5201 static void
5202 expand_builtin_unreachable (void)
5204 emit_barrier ();
5207 /* Expand EXP, a call to fabs, fabsf or fabsl.
5208 Return NULL_RTX if a normal call should be emitted rather than expanding
5209 the function inline. If convenient, the result should be placed
5210 in TARGET. SUBTARGET may be used as the target for computing
5211 the operand. */
5213 static rtx
5214 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5216 machine_mode mode;
5217 tree arg;
5218 rtx op0;
5220 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5221 return NULL_RTX;
5223 arg = CALL_EXPR_ARG (exp, 0);
5224 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5225 mode = TYPE_MODE (TREE_TYPE (arg));
5226 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5227 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5230 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5231 Return NULL if a normal call should be emitted rather than expanding the
5232 function inline. If convenient, the result should be placed in TARGET.
5233 SUBTARGET may be used as the target for computing the operand. */
5235 static rtx
5236 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5238 rtx op0, op1;
5239 tree arg;
5241 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5242 return NULL_RTX;
5244 arg = CALL_EXPR_ARG (exp, 0);
5245 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5247 arg = CALL_EXPR_ARG (exp, 1);
5248 op1 = expand_normal (arg);
5250 return expand_copysign (op0, op1, target);
5253 /* Expand a call to __builtin___clear_cache. */
5255 static rtx
5256 expand_builtin___clear_cache (tree exp)
5258 if (!targetm.code_for_clear_cache)
5260 #ifdef CLEAR_INSN_CACHE
5261 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5262 does something. Just do the default expansion to a call to
5263 __clear_cache(). */
5264 return NULL_RTX;
5265 #else
5266 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5267 does nothing. There is no need to call it. Do nothing. */
5268 return const0_rtx;
5269 #endif /* CLEAR_INSN_CACHE */
5272 /* We have a "clear_cache" insn, and it will handle everything. */
5273 tree begin, end;
5274 rtx begin_rtx, end_rtx;
5276 /* We must not expand to a library call. If we did, any
5277 fallback library function in libgcc that might contain a call to
5278 __builtin___clear_cache() would recurse infinitely. */
5279 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5281 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5282 return const0_rtx;
5285 if (targetm.have_clear_cache ())
5287 struct expand_operand ops[2];
5289 begin = CALL_EXPR_ARG (exp, 0);
5290 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5292 end = CALL_EXPR_ARG (exp, 1);
5293 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5295 create_address_operand (&ops[0], begin_rtx);
5296 create_address_operand (&ops[1], end_rtx);
5297 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5298 return const0_rtx;
5300 return const0_rtx;
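/* Sketch of the typical JIT-style use of the builtin expanded above
   (illustrative only; assumes BUF already holds valid machine code).  */
#if 0
void
run_generated_code (unsigned char *buf, unsigned int len)
{
  /* Flush [BUF, BUF+LEN) so instruction fetch sees the fresh bytes on
     targets whose instruction and data caches are not coherent.  */
  __builtin___clear_cache ((char *) buf, (char *) (buf + len));
  ((void (*) (void)) buf) ();
}
#endif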
5303 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5305 static rtx
5306 round_trampoline_addr (rtx tramp)
5308 rtx temp, addend, mask;
5310 /* If we don't need too much alignment, we'll have been guaranteed
5311 proper alignment by get_trampoline_type. */
5312 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5313 return tramp;
5315 /* Round address up to desired boundary. */
5316 temp = gen_reg_rtx (Pmode);
5317 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5318 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5320 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5321 temp, 0, OPTAB_LIB_WIDEN);
5322 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5323 temp, 0, OPTAB_LIB_WIDEN);
5325 return tramp;
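/* The PLUS/AND pair above computes the usual power-of-two round-up; a
   plain C rendering for reference (illustrative only):  */
#if 0
#include <stdint.h>

static uintptr_t
round_up (uintptr_t addr, uintptr_t align)	/* ALIGN a power of two.  */
{
  /* E.g. round_up (100, 16) == 112 and round_up (112, 16) == 112.  */
  return (addr + align - 1) & -align;
}
#endif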
5328 static rtx
5329 expand_builtin_init_trampoline (tree exp, bool onstack)
5331 tree t_tramp, t_func, t_chain;
5332 rtx m_tramp, r_tramp, r_chain, tmp;
5334 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5335 POINTER_TYPE, VOID_TYPE))
5336 return NULL_RTX;
5338 t_tramp = CALL_EXPR_ARG (exp, 0);
5339 t_func = CALL_EXPR_ARG (exp, 1);
5340 t_chain = CALL_EXPR_ARG (exp, 2);
5342 r_tramp = expand_normal (t_tramp);
5343 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5344 MEM_NOTRAP_P (m_tramp) = 1;
5346 /* If ONSTACK, the TRAMP argument should be the address of a field
5347 within the local function's FRAME decl. Either way, let's see if
5348 we can fill in the MEM_ATTRs for this memory. */
5349 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5350 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5352 /* Creator of a heap trampoline is responsible for making sure the
5353 address is aligned to at least STACK_BOUNDARY. Normally malloc
5354 will ensure this anyhow. */
5355 tmp = round_trampoline_addr (r_tramp);
5356 if (tmp != r_tramp)
5358 m_tramp = change_address (m_tramp, BLKmode, tmp);
5359 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5360 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5363 /* The FUNC argument should be the address of the nested function.
5364 Extract the actual function decl to pass to the hook. */
5365 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5366 t_func = TREE_OPERAND (t_func, 0);
5367 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5369 r_chain = expand_normal (t_chain);
5371 /* Generate insns to initialize the trampoline. */
5372 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5374 if (onstack)
5376 trampolines_created = 1;
5378 if (targetm.calls.custom_function_descriptors != 0)
5379 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5380 "trampoline generated for nested function %qD", t_func);
5383 return const0_rtx;
5386 static rtx
5387 expand_builtin_adjust_trampoline (tree exp)
5389 rtx tramp;
5391 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5392 return NULL_RTX;
5394 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5395 tramp = round_trampoline_addr (tramp);
5396 if (targetm.calls.trampoline_adjust_address)
5397 tramp = targetm.calls.trampoline_adjust_address (tramp);
5399 return tramp;
5402 /* Expand a call to the builtin descriptor initialization routine.
5403 A descriptor is made up of a couple of pointers to the static
5404 chain and the code entry in this order. */
5406 static rtx
5407 expand_builtin_init_descriptor (tree exp)
5409 tree t_descr, t_func, t_chain;
5410 rtx m_descr, r_descr, r_func, r_chain;
5412 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5413 VOID_TYPE))
5414 return NULL_RTX;
5416 t_descr = CALL_EXPR_ARG (exp, 0);
5417 t_func = CALL_EXPR_ARG (exp, 1);
5418 t_chain = CALL_EXPR_ARG (exp, 2);
5420 r_descr = expand_normal (t_descr);
5421 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5422 MEM_NOTRAP_P (m_descr) = 1;
5424 r_func = expand_normal (t_func);
5425 r_chain = expand_normal (t_chain);
5427 /* Generate insns to initialize the descriptor. */
5428 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5429 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5430 POINTER_SIZE / BITS_PER_UNIT), r_func);
5432 return const0_rtx;
5435 /* Expand a call to the builtin descriptor adjustment routine. */
5437 static rtx
5438 expand_builtin_adjust_descriptor (tree exp)
5440 rtx tramp;
5442 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5443 return NULL_RTX;
5445 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5447 /* Unalign the descriptor to allow runtime identification. */
5448 tramp = plus_constant (ptr_mode, tramp,
5449 targetm.calls.custom_function_descriptors);
5451 return force_operand (tramp, NULL_RTX);
5454 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5455 function. The function first checks whether the back end provides
5456 an insn to implement signbit for the respective mode. If not, it
5457 checks whether the floating point format of the value is such that
5458 the sign bit can be extracted. If that is not the case, error out.
5459 EXP is the expression that is a call to the builtin function; if
5460 convenient, the result should be placed in TARGET. */
5461 static rtx
5462 expand_builtin_signbit (tree exp, rtx target)
5464 const struct real_format *fmt;
5465 scalar_float_mode fmode;
5466 scalar_int_mode rmode, imode;
5467 tree arg;
5468 int word, bitpos;
5469 enum insn_code icode;
5470 rtx temp;
5471 location_t loc = EXPR_LOCATION (exp);
5473 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5474 return NULL_RTX;
5476 arg = CALL_EXPR_ARG (exp, 0);
5477 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5478 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5479 fmt = REAL_MODE_FORMAT (fmode);
5481 arg = builtin_save_expr (arg);
5483 /* Expand the argument yielding a RTX expression. */
5484 temp = expand_normal (arg);
5486 /* Check if the back end provides an insn that handles signbit for the
5487 argument's mode. */
5488 icode = optab_handler (signbit_optab, fmode);
5489 if (icode != CODE_FOR_nothing)
5491 rtx_insn *last = get_last_insn ();
5492 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5493 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5494 return target;
5495 delete_insns_since (last);
5498 /* For floating point formats without a sign bit, implement signbit
5499 as "ARG < 0.0". */
5500 bitpos = fmt->signbit_ro;
5501 if (bitpos < 0)
5503 /* But we can't do this if the format supports signed zero. */
5504 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5506 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5507 build_real (TREE_TYPE (arg), dconst0));
5508 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5511 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5513 imode = int_mode_for_mode (fmode).require ();
5514 temp = gen_lowpart (imode, temp);
5516 else
5518 imode = word_mode;
5519 /* Handle targets with different FP word orders. */
5520 if (FLOAT_WORDS_BIG_ENDIAN)
5521 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5522 else
5523 word = bitpos / BITS_PER_WORD;
5524 temp = operand_subword_force (temp, word, fmode);
5525 bitpos = bitpos % BITS_PER_WORD;
5528 /* Force the intermediate word_mode (or narrower) result into a
5529 register. This avoids attempting to create paradoxical SUBREGs
5530 of floating point modes below. */
5531 temp = force_reg (imode, temp);
5533 /* If the bitpos is within the "result mode" lowpart, the operation
5534 can be implemented with a single bitwise AND. Otherwise, we need
5535 a right shift and an AND. */
5537 if (bitpos < GET_MODE_BITSIZE (rmode))
5539 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5541 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5542 temp = gen_lowpart (rmode, temp);
5543 temp = expand_binop (rmode, and_optab, temp,
5544 immed_wide_int_const (mask, rmode),
5545 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5547 else
5549 /* Perform a logical right shift to place the signbit in the least
5550 significant bit, then truncate the result to the desired mode
5551 and mask just this bit. */
5552 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5553 temp = gen_lowpart (rmode, temp);
5554 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5555 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5558 return temp;
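/* Illustrative use of the builtin expanded above (hypothetical helper):
   unlike "x < 0.0", signbit distinguishes -0.0 and reports the sign of
   NaNs.  */
#if 0
int
is_negative_zero (double x)
{
  return x == 0.0 && __builtin_signbit (x);
}
#endif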
5561 /* Expand fork or exec calls. TARGET is the desired target of the
5562 call. EXP is the call. FN is the
5563 declaration of the actual function. IGNORE is nonzero if the
5564 value is to be ignored. */
5566 static rtx
5567 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5569 tree id, decl;
5570 tree call;
5572 /* If we are not profiling, just call the function. */
5573 if (!profile_arc_flag)
5574 return NULL_RTX;
5576 /* Otherwise call the wrapper. This should be equivalent for the rest of
5577 the compiler, so the code does not diverge, and the wrapper may run the
5578 code necessary to keep the profiling sane. */
5580 switch (DECL_FUNCTION_CODE (fn))
5582 case BUILT_IN_FORK:
5583 id = get_identifier ("__gcov_fork");
5584 break;
5586 case BUILT_IN_EXECL:
5587 id = get_identifier ("__gcov_execl");
5588 break;
5590 case BUILT_IN_EXECV:
5591 id = get_identifier ("__gcov_execv");
5592 break;
5594 case BUILT_IN_EXECLP:
5595 id = get_identifier ("__gcov_execlp");
5596 break;
5598 case BUILT_IN_EXECLE:
5599 id = get_identifier ("__gcov_execle");
5600 break;
5602 case BUILT_IN_EXECVP:
5603 id = get_identifier ("__gcov_execvp");
5604 break;
5606 case BUILT_IN_EXECVE:
5607 id = get_identifier ("__gcov_execve");
5608 break;
5610 default:
5611 gcc_unreachable ();
5614 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5615 FUNCTION_DECL, id, TREE_TYPE (fn));
5616 DECL_EXTERNAL (decl) = 1;
5617 TREE_PUBLIC (decl) = 1;
5618 DECL_ARTIFICIAL (decl) = 1;
5619 TREE_NOTHROW (decl) = 1;
5620 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5621 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5622 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5623 return expand_call (call, target, ignore);
5628 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5629 the pointer in these functions is void*, the tree optimizers may remove
5630 casts. The mode computed in expand_builtin isn't reliable either, due
5631 to __sync_bool_compare_and_swap.
5633 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5634 group of builtins. This gives us log2 of the mode size. */
5636 static inline machine_mode
5637 get_builtin_sync_mode (int fcode_diff)
5639 /* The size is not negotiable, so ask not to get BLKmode in return
5640 if the target indicates that a smaller size would be better. */
5641 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
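/* For example, BUILT_IN_SYNC_FETCH_AND_ADD_4 is two enumerators past
   BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and the access is
   8 << 2 == 32 bits wide -- SImode on typical targets.  */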
5644 /* Expand the memory expression LOC and return the appropriate memory operand
5645 for the builtin_sync operations. */
5647 static rtx
5648 get_builtin_sync_mem (tree loc, machine_mode mode)
5650 rtx addr, mem;
5652 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5653 addr = convert_memory_address (Pmode, addr);
5655 /* Note that we explicitly do not want any alias information for this
5656 memory, so that we kill all other live memories. Otherwise we don't
5657 satisfy the full barrier semantics of the intrinsic. */
5658 mem = validize_mem (gen_rtx_MEM (mode, addr));
5660 /* The alignment needs to be at least that of the mode. */
5661 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5662 get_pointer_alignment (loc)));
5663 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5664 MEM_VOLATILE_P (mem) = 1;
5666 return mem;
5669 /* Make sure an argument is in the right mode.
5670 EXP is the tree argument.
5671 MODE is the mode it should be in. */
5673 static rtx
5674 expand_expr_force_mode (tree exp, machine_mode mode)
5676 rtx val;
5677 machine_mode old_mode;
5679 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5680 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5681 of CONST_INTs, where we know the old_mode only from the call argument. */
5683 old_mode = GET_MODE (val);
5684 if (old_mode == VOIDmode)
5685 old_mode = TYPE_MODE (TREE_TYPE (exp));
5686 val = convert_modes (mode, old_mode, val, 1);
5687 return val;
5691 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5692 EXP is the CALL_EXPR. CODE is the rtx code
5693 that corresponds to the arithmetic or logical operation from the name;
5694 an exception here is that NOT actually means NAND. TARGET is an optional
5695 place for us to store the results; AFTER is true if this is the
5696 fetch_and_xxx form. */
5698 static rtx
5699 expand_builtin_sync_operation (machine_mode mode, tree exp,
5700 enum rtx_code code, bool after,
5701 rtx target)
5703 rtx val, mem;
5704 location_t loc = EXPR_LOCATION (exp);
5706 if (code == NOT && warn_sync_nand)
5708 tree fndecl = get_callee_fndecl (exp);
5709 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5711 static bool warned_f_a_n, warned_n_a_f;
5713 switch (fcode)
5715 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5716 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5717 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5718 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5719 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5720 if (warned_f_a_n)
5721 break;
5723 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5724 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5725 warned_f_a_n = true;
5726 break;
5728 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5729 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5730 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5731 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5732 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5733 if (warned_n_a_f)
5734 break;
5736 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5737 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5738 warned_n_a_f = true;
5739 break;
5741 default:
5742 gcc_unreachable ();
5746 /* Expand the operands. */
5747 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5748 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5750 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5751 after);
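/* Illustrative calls that reach the expander above (hypothetical helpers;
   CODE == PLUS with AFTER false, and CODE == NOT, i.e. NAND, with AFTER
   true):  */
#if 0
int
fetch_then_add (int *p)
{
  return __sync_fetch_and_add (p, 1);	/* Returns the old value.  */
}

int
nand_then_fetch (int *p, int m)
{
  /* Since GCC 4.4, NAND means *p = ~(*p & m); this form returns the
     new value.  */
  return __sync_nand_and_fetch (p, m);
}
#endif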
5754 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5755 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5756 true if this is the boolean form. TARGET is a place for us to store the
5757 results; this is NOT optional if IS_BOOL is true. */
5759 static rtx
5760 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5761 bool is_bool, rtx target)
5763 rtx old_val, new_val, mem;
5764 rtx *pbool, *poval;
5766 /* Expand the operands. */
5767 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5768 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5769 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5771 pbool = poval = NULL;
5772 if (target != const0_rtx)
5774 if (is_bool)
5775 pbool = &target;
5776 else
5777 poval = &target;
5779 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5780 false, MEMMODEL_SYNC_SEQ_CST,
5781 MEMMODEL_SYNC_SEQ_CST))
5782 return NULL_RTX;
5784 return target;
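/* Illustrative use of the boolean form handled above, e.g. as a trivial
   try-lock (hypothetical helper):  */
#if 0
int
try_lock (int *lock)
{
  /* Nonzero iff *LOCK was 0 and has atomically become 1.  */
  return __sync_bool_compare_and_swap (lock, 0, 1);
}
#endif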
5787 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5788 general form is actually an atomic exchange, and some targets only
5789 support a reduced form with the second argument being a constant 1.
5790 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5791 the results. */
5793 static rtx
5794 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5795 rtx target)
5797 rtx val, mem;
5799 /* Expand the operands. */
5800 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5801 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5803 return expand_sync_lock_test_and_set (target, mem, val);
5806 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5808 static void
5809 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5811 rtx mem;
5813 /* Expand the operands. */
5814 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5816 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5819 /* Given an integer representing an ``enum memmodel'', verify its
5820 correctness and return the memory model enum. */
5822 static enum memmodel
5823 get_memmodel (tree exp)
5825 rtx op;
5826 unsigned HOST_WIDE_INT val;
5827 source_location loc
5828 = expansion_point_location_if_in_system_header (input_location);
5830 /* If the parameter is not a constant, it's a run time value so we'll just
5831 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5832 if (TREE_CODE (exp) != INTEGER_CST)
5833 return MEMMODEL_SEQ_CST;
5835 op = expand_normal (exp);
5837 val = INTVAL (op);
5838 if (targetm.memmodel_check)
5839 val = targetm.memmodel_check (val);
5840 else if (val & ~MEMMODEL_MASK)
5842 warning_at (loc, OPT_Winvalid_memory_model,
5843 "unknown architecture specifier in memory model to builtin");
5844 return MEMMODEL_SEQ_CST;
5847 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
5848 if (memmodel_base (val) >= MEMMODEL_LAST)
5850 warning_at (loc, OPT_Winvalid_memory_model,
5851 "invalid memory model argument to builtin");
5852 return MEMMODEL_SEQ_CST;
5855 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5856 be conservative and promote consume to acquire. */
5857 if (val == MEMMODEL_CONSUME)
5858 val = MEMMODEL_ACQUIRE;
5860 return (enum memmodel) val;
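/* A user-level consequence of the constant check above, sketched with a
   hypothetical helper: a run-time memory model argument is treated as
   seq_cst rather than being checked at run time.  */
#if 0
int
load_with_order (int *p, int order)
{
  /* ORDER is not an INTEGER_CST here, so this expands as if
     __ATOMIC_SEQ_CST had been written.  */
  return __atomic_load_n (p, order);
}
#endif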
5863 /* Expand the __atomic_exchange intrinsic:
5864 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5865 EXP is the CALL_EXPR.
5866 TARGET is an optional place for us to store the results. */
5868 static rtx
5869 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5871 rtx val, mem;
5872 enum memmodel model;
5874 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5876 if (!flag_inline_atomics)
5877 return NULL_RTX;
5879 /* Expand the operands. */
5880 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5881 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5883 return expand_atomic_exchange (target, mem, val, model);
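/* Illustrative use of the exchange handled above, as a spin-lock acquire
   (hypothetical helper):  */
#if 0
void
spin_lock (int *lock)
{
  /* The old value is returned; loop until we were the one to move *LOCK
     from 0 to 1.  */
  while (__atomic_exchange_n (lock, 1, __ATOMIC_ACQUIRE))
    ;
}
#endif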
5886 /* Expand the __atomic_compare_exchange intrinsic:
5887 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5888 TYPE desired, BOOL weak,
5889 enum memmodel success,
5890 enum memmodel failure)
5891 EXP is the CALL_EXPR.
5892 TARGET is an optional place for us to store the results. */
5894 static rtx
5895 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5896 rtx target)
5898 rtx expect, desired, mem, oldval;
5899 rtx_code_label *label;
5900 enum memmodel success, failure;
5901 tree weak;
5902 bool is_weak;
5903 source_location loc
5904 = expansion_point_location_if_in_system_header (input_location);
5906 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5907 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5909 if (failure > success)
5911 warning_at (loc, OPT_Winvalid_memory_model,
5912 "failure memory model cannot be stronger than success "
5913 "memory model for %<__atomic_compare_exchange%>");
5914 success = MEMMODEL_SEQ_CST;
5917 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5919 warning_at (loc, OPT_Winvalid_memory_model,
5920 "invalid failure memory model for "
5921 "%<__atomic_compare_exchange%>");
5922 failure = MEMMODEL_SEQ_CST;
5923 success = MEMMODEL_SEQ_CST;
5927 if (!flag_inline_atomics)
5928 return NULL_RTX;
5930 /* Expand the operands. */
5931 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5933 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5934 expect = convert_memory_address (Pmode, expect);
5935 expect = gen_rtx_MEM (mode, expect);
5936 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5938 weak = CALL_EXPR_ARG (exp, 3);
5939 is_weak = false;
5940 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5941 is_weak = true;
5943 if (target == const0_rtx)
5944 target = NULL;
5946 /* Lest the rtl backend create a race condition with an improper store
5947 to memory, always create a new pseudo for OLDVAL. */
5948 oldval = NULL;
5950 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5951 is_weak, success, failure))
5952 return NULL_RTX;
5954 /* Conditionally store back to EXPECT, lest we create a race condition
5955 with an improper store to memory. */
5956 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5957 the normal case where EXPECT is totally private, i.e. a register. At
5958 which point the store can be unconditional. */
5959 label = gen_label_rtx ();
5960 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5961 GET_MODE (target), 1, label);
5962 emit_move_insn (expect, oldval);
5963 emit_label (label);
5965 return target;
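/* User-level sketch of the EXPECT write-back implemented above
   (hypothetical helper): on failure the current value lands back in
   EXPECTED, which is what makes CAS loops like this one idiomatic.  */
#if 0
int
fetch_max (int *p, int val)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (val > expected
	 && !__atomic_compare_exchange_n (p, &expected, val, 1 /* weak */,
					  __ATOMIC_SEQ_CST,
					  __ATOMIC_RELAXED))
    ;	/* EXPECTED was refreshed; retry.  */
  return expected;
}
#endif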
5968 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5969 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5970 call. The weak parameter must be dropped to match the expected parameter
5971 list, and the expected argument must be changed from a value to a pointer
5972 to a memory slot. */
5974 static void
5975 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5977 unsigned int z;
5978 vec<tree, va_gc> *vec;
5980 vec_alloc (vec, 5);
5981 vec->quick_push (gimple_call_arg (call, 0));
5982 tree expected = gimple_call_arg (call, 1);
5983 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5984 TREE_TYPE (expected));
5985 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5986 if (expd != x)
5987 emit_move_insn (x, expd);
5988 tree v = make_tree (TREE_TYPE (expected), x);
5989 vec->quick_push (build1 (ADDR_EXPR,
5990 build_pointer_type (TREE_TYPE (expected)), v));
5991 vec->quick_push (gimple_call_arg (call, 2));
5992 /* Skip the boolean weak parameter. */
5993 for (z = 4; z < 6; z++)
5994 vec->quick_push (gimple_call_arg (call, z));
5995 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5996 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5997 gcc_assert (bytes_log2 < 5);
5998 built_in_function fncode
5999 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6000 + bytes_log2);
6001 tree fndecl = builtin_decl_explicit (fncode);
6002 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6003 fndecl);
6004 tree exp = build_call_vec (boolean_type_node, fn, vec);
6005 tree lhs = gimple_call_lhs (call);
6006 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6007 if (lhs)
6009 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6010 if (GET_MODE (boolret) != mode)
6011 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6012 x = force_reg (mode, x);
6013 write_complex_part (target, boolret, true);
6014 write_complex_part (target, x, false);
6018 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6020 void
6021 expand_ifn_atomic_compare_exchange (gcall *call)
6023 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6024 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6025 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6026 rtx expect, desired, mem, oldval, boolret;
6027 enum memmodel success, failure;
6028 tree lhs;
6029 bool is_weak;
6030 source_location loc
6031 = expansion_point_location_if_in_system_header (gimple_location (call));
6033 success = get_memmodel (gimple_call_arg (call, 4));
6034 failure = get_memmodel (gimple_call_arg (call, 5));
6036 if (failure > success)
6038 warning_at (loc, OPT_Winvalid_memory_model,
6039 "failure memory model cannot be stronger than success "
6040 "memory model for %<__atomic_compare_exchange%>");
6041 success = MEMMODEL_SEQ_CST;
6044 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6046 warning_at (loc, OPT_Winvalid_memory_model,
6047 "invalid failure memory model for "
6048 "%<__atomic_compare_exchange%>");
6049 failure = MEMMODEL_SEQ_CST;
6050 success = MEMMODEL_SEQ_CST;
6053 if (!flag_inline_atomics)
6055 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6056 return;
6059 /* Expand the operands. */
6060 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6062 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6063 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6065 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6067 boolret = NULL;
6068 oldval = NULL;
6070 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6071 is_weak, success, failure))
6073 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6074 return;
6077 lhs = gimple_call_lhs (call);
6078 if (lhs)
6080 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6081 if (GET_MODE (boolret) != mode)
6082 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6083 write_complex_part (target, boolret, true);
6084 write_complex_part (target, oldval, false);
6088 /* Expand the __atomic_load intrinsic:
6089 TYPE __atomic_load (TYPE *object, enum memmodel)
6090 EXP is the CALL_EXPR.
6091 TARGET is an optional place for us to store the results. */
6093 static rtx
6094 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6096 rtx mem;
6097 enum memmodel model;
6099 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6100 if (is_mm_release (model) || is_mm_acq_rel (model))
6102 source_location loc
6103 = expansion_point_location_if_in_system_header (input_location);
6104 warning_at (loc, OPT_Winvalid_memory_model,
6105 "invalid memory model for %<__atomic_load%>");
6106 model = MEMMODEL_SEQ_CST;
6109 if (!flag_inline_atomics)
6110 return NULL_RTX;
6112 /* Expand the operand. */
6113 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6115 return expand_atomic_load (target, mem, model);
6119 /* Expand the __atomic_store intrinsic:
6120 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6121 EXP is the CALL_EXPR.
6122 TARGET is an optional place for us to store the results. */
6124 static rtx
6125 expand_builtin_atomic_store (machine_mode mode, tree exp)
6127 rtx mem, val;
6128 enum memmodel model;
6130 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6131 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6132 || is_mm_release (model)))
6134 source_location loc
6135 = expansion_point_location_if_in_system_header (input_location);
6136 warning_at (loc, OPT_Winvalid_memory_model,
6137 "invalid memory model for %<__atomic_store%>");
6138 model = MEMMODEL_SEQ_CST;
6141 if (!flag_inline_atomics)
6142 return NULL_RTX;
6144 /* Expand the operands. */
6145 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6146 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6148 return expand_atomic_store (mem, val, model, false);
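/* Illustrative release/acquire pairing built from the two expanders above
   (hypothetical helpers):  */
#if 0
int payload;
int ready;

void
publish (int v)
{
  payload = v;
  /* Release: PAYLOAD is visible before READY flips.  */
  __atomic_store_n (&ready, 1, __ATOMIC_RELEASE);
}

int
consume (void)
{
  /* Acquire: pairs with the release store above.  */
  while (!__atomic_load_n (&ready, __ATOMIC_ACQUIRE))
    ;
  return payload;
}
#endif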
6151 /* Expand the __atomic_fetch_XXX intrinsic:
6152 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6153 EXP is the CALL_EXPR.
6154 TARGET is an optional place for us to store the results.
6155 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6156 FETCH_AFTER is true if returning the result of the operation.
6157 FETCH_AFTER is false if returning the value before the operation.
6158 IGNORE is true if the result is not used.
6159 EXT_CALL is the correct builtin for an external call if this cannot be
6160 resolved to an instruction sequence. */
6162 static rtx
6163 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6164 enum rtx_code code, bool fetch_after,
6165 bool ignore, enum built_in_function ext_call)
6167 rtx val, mem, ret;
6168 enum memmodel model;
6169 tree fndecl;
6170 tree addr;
6172 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6174 /* Expand the operands. */
6175 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6176 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6178 /* Only try generating instructions if inlining is turned on. */
6179 if (flag_inline_atomics)
6181 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6182 if (ret)
6183 return ret;
6186 /* Return if there is no replacement library routine for the call. */
6187 if (ext_call == BUILT_IN_NONE)
6188 return NULL_RTX;
6190 /* Change the call to the specified function. */
6191 fndecl = get_callee_fndecl (exp);
6192 addr = CALL_EXPR_FN (exp);
6193 STRIP_NOPS (addr);
6195 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6196 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6198 /* If we will emit code after the call, the call cannot be a tail call.
6199 If it is emitted as a tail call, a barrier is emitted after it, and
6200 then all trailing code is removed. */
6201 if (!ignore)
6202 CALL_EXPR_TAILCALL (exp) = 0;
6204 /* Expand the call here so we can emit trailing code. */
6205 ret = expand_call (exp, target, ignore);
6207 /* Replace the original function just in case it matters. */
6208 TREE_OPERAND (addr, 0) = fndecl;
6210 /* Then issue the arithmetic correction to return the right result. */
6211 if (!ignore)
6213 if (code == NOT)
6215 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6216 OPTAB_LIB_WIDEN);
6217 ret = expand_simple_unop (mode, NOT, ret, target, true);
6219 else
6220 ret = expand_simple_binop (mode, code, ret, val, target, true,
6221 OPTAB_LIB_WIDEN);
6223 return ret;
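/* Worked example of the correction above for CODE == NOT: with *PTR ==
   0x3 and VAL == 0x5, the library call returns the old value 0x3, and
   __atomic_nand_fetch must yield ~(0x3 & 0x5) == ~0x1, which is exactly
   what the AND-then-NOT recomputation produces.  */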
6226 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6228 void
6229 expand_ifn_atomic_bit_test_and (gcall *call)
6231 tree ptr = gimple_call_arg (call, 0);
6232 tree bit = gimple_call_arg (call, 1);
6233 tree flag = gimple_call_arg (call, 2);
6234 tree lhs = gimple_call_lhs (call);
6235 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6236 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6237 enum rtx_code code;
6238 optab optab;
6239 struct expand_operand ops[5];
6241 gcc_assert (flag_inline_atomics);
6243 if (gimple_call_num_args (call) == 4)
6244 model = get_memmodel (gimple_call_arg (call, 3));
6246 rtx mem = get_builtin_sync_mem (ptr, mode);
6247 rtx val = expand_expr_force_mode (bit, mode);
6249 switch (gimple_call_internal_fn (call))
6251 case IFN_ATOMIC_BIT_TEST_AND_SET:
6252 code = IOR;
6253 optab = atomic_bit_test_and_set_optab;
6254 break;
6255 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6256 code = XOR;
6257 optab = atomic_bit_test_and_complement_optab;
6258 break;
6259 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6260 code = AND;
6261 optab = atomic_bit_test_and_reset_optab;
6262 break;
6263 default:
6264 gcc_unreachable ();
6267 if (lhs == NULL_TREE)
6269 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6270 val, NULL_RTX, true, OPTAB_DIRECT);
6271 if (code == AND)
6272 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6273 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6274 return;
6277 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6278 enum insn_code icode = direct_optab_handler (optab, mode);
6279 gcc_assert (icode != CODE_FOR_nothing);
6280 create_output_operand (&ops[0], target, mode);
6281 create_fixed_operand (&ops[1], mem);
6282 create_convert_operand_to (&ops[2], val, mode, true);
6283 create_integer_operand (&ops[3], model);
6284 create_integer_operand (&ops[4], integer_onep (flag));
6285 if (maybe_expand_insn (icode, 5, ops))
6286 return;
6288 rtx bitval = val;
6289 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6290 val, NULL_RTX, true, OPTAB_DIRECT);
6291 rtx maskval = val;
6292 if (code == AND)
6293 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6294 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6295 code, model, false);
6296 if (integer_onep (flag))
6298 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6299 NULL_RTX, true, OPTAB_DIRECT);
6300 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6301 true, OPTAB_DIRECT);
6303 else
6304 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6305 OPTAB_DIRECT);
6306 if (result != target)
6307 emit_move_insn (target, result);
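/* The source-level pattern that the GIMPLE passes fold into this internal
   function (illustrative only; hypothetical helper):  */
#if 0
int
atomic_test_and_set_bit (unsigned int *word, unsigned int bit)
{
  unsigned int mask = 1u << bit;
  /* Matched and rewritten into IFN_ATOMIC_BIT_TEST_AND_SET, letting
     targets use a single atomic bit-test-and-set instruction.  */
  return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}
#endif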
6310 /* Expand an atomic clear operation.
6311 void __atomic_clear (BOOL *obj, enum memmodel)
6312 EXP is the call expression. */
6314 static rtx
6315 expand_builtin_atomic_clear (tree exp)
6317 machine_mode mode;
6318 rtx mem, ret;
6319 enum memmodel model;
6321 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6322 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6323 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6325 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6327 source_location loc
6328 = expansion_point_location_if_in_system_header (input_location);
6329 warning_at (loc, OPT_Winvalid_memory_model,
6330 "invalid memory model for %<__atomic_store%>");
6331 model = MEMMODEL_SEQ_CST;
6334 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6335 Failing that, a plain store is issued below. The only way this can
6336 fail is if the bool type is larger than a word size. Unlikely, but
6337 handle it anyway for completeness. Assume a single threaded model since
6338 there is no atomic support in this case, and no barriers are required. */
6339 ret = expand_atomic_store (mem, const0_rtx, model, true);
6340 if (!ret)
6341 emit_move_insn (mem, const0_rtx);
6342 return const0_rtx;
6345 /* Expand an atomic test_and_set operation.
6346 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6347 EXP is the call expression. */
6349 static rtx
6350 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6352 rtx mem;
6353 enum memmodel model;
6354 machine_mode mode;
6356 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6357 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6358 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6360 return expand_atomic_test_and_set (target, mem, model);
6364 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6365 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6367 static tree
6368 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6370 int size;
6371 machine_mode mode;
6372 unsigned int mode_align, type_align;
6374 if (TREE_CODE (arg0) != INTEGER_CST)
6375 return NULL_TREE;
6377 /* We need a corresponding integer mode for the access to be lock-free. */
6378 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6379 if (!int_mode_for_size (size, 0).exists (&mode))
6380 return boolean_false_node;
6382 mode_align = GET_MODE_ALIGNMENT (mode);
6384 if (TREE_CODE (arg1) == INTEGER_CST)
6386 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6388 /* Either this argument is null, or it's a fake pointer encoding
6389 the alignment of the object. */
6390 val = least_bit_hwi (val);
6391 val *= BITS_PER_UNIT;
6393 if (val == 0 || mode_align < val)
6394 type_align = mode_align;
6395 else
6396 type_align = val;
6398 else
6400 tree ttype = TREE_TYPE (arg1);
6402 /* This function is usually invoked and folded immediately by the front
6403 end before anything else has a chance to look at it. The pointer
6404 parameter at this point is usually cast to a void *, so check for that
6405 and look past the cast. */
6406 if (CONVERT_EXPR_P (arg1)
6407 && POINTER_TYPE_P (ttype)
6408 && VOID_TYPE_P (TREE_TYPE (ttype))
6409 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6410 arg1 = TREE_OPERAND (arg1, 0);
6412 ttype = TREE_TYPE (arg1);
6413 gcc_assert (POINTER_TYPE_P (ttype));
6415 /* Get the underlying type of the object. */
6416 ttype = TREE_TYPE (ttype);
6417 type_align = TYPE_ALIGN (ttype);
6420 /* If the object has smaller alignment, the lock free routines cannot
6421 be used. */
6422 if (type_align < mode_align)
6423 return boolean_false_node;
6425 /* Check if a compare_and_swap pattern exists for the mode which represents
6426 the required size. The pattern is not allowed to fail, so the existence
6427 of the pattern indicates support is present. Also require that an
6428 atomic load exists for the required size. */
6429 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6430 return boolean_true_node;
6431 else
6432 return boolean_false_node;
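/* Illustrative front-end use of the folder above; on typical targets this
   folds to true at compile time (a sketch, assuming 32-bit atomics are
   lock-free on the target):  */
#if 0
_Static_assert (__atomic_always_lock_free (4, 0),
		"32-bit atomics assumed lock-free on this target");
#endif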
6435 /* Return true if the parameters to call EXP represent an object which will
6436 always generate lock free instructions. The first argument represents the
6437 size of the object, and the second parameter is a pointer to the object
6438 itself. If NULL is passed for the object, then the result is based on
6439 typical alignment for an object of the specified size. Otherwise return
6440 false. */
6442 static rtx
6443 expand_builtin_atomic_always_lock_free (tree exp)
6445 tree size;
6446 tree arg0 = CALL_EXPR_ARG (exp, 0);
6447 tree arg1 = CALL_EXPR_ARG (exp, 1);
6449 if (TREE_CODE (arg0) != INTEGER_CST)
6451 error ("non-constant argument 1 to __atomic_always_lock_free");
6452 return const0_rtx;
6455 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6456 if (size == boolean_true_node)
6457 return const1_rtx;
6458 return const0_rtx;
6461 /* Return one or zero if it can be determined whether object ARG1 of size
6462 ARG0 is lock free on this architecture. */
6464 static tree
6465 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6467 if (!flag_inline_atomics)
6468 return NULL_TREE;
6470 /* If it isn't always lock free, don't generate a result. */
6471 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6472 return boolean_true_node;
6474 return NULL_TREE;
6477 /* Return true if the parameters to call EXP represent an object which will
6478 always generate lock free instructions. The first argument represents the
6479 size of the object, and the second parameter is a pointer to the object
6480 itself. If NULL is passed for the object, then the result is based on
6481 typical alignment for an object of the specified size. Otherwise return
6482 NULL. */
6484 static rtx
6485 expand_builtin_atomic_is_lock_free (tree exp)
6487 tree size;
6488 tree arg0 = CALL_EXPR_ARG (exp, 0);
6489 tree arg1 = CALL_EXPR_ARG (exp, 1);
6491 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6493 error ("non-integer argument 1 to __atomic_is_lock_free");
6494 return NULL_RTX;
6497 if (!flag_inline_atomics)
6498 return NULL_RTX;
6500 /* If the value is known at compile time, return the RTX for it. */
6501 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6502 if (size == boolean_true_node)
6503 return const1_rtx;
6505 return NULL_RTX;
6508 /* Expand the __atomic_thread_fence intrinsic:
6509 void __atomic_thread_fence (enum memmodel)
6510 EXP is the CALL_EXPR. */
6512 static void
6513 expand_builtin_atomic_thread_fence (tree exp)
6515 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6516 expand_mem_thread_fence (model);
6519 /* Expand the __atomic_signal_fence intrinsic:
6520 void __atomic_signal_fence (enum memmodel)
6521 EXP is the CALL_EXPR. */
6523 static void
6524 expand_builtin_atomic_signal_fence (tree exp)
6526 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6527 expand_mem_signal_fence (model);
6530 /* Expand the __sync_synchronize intrinsic. */
6532 static void
6533 expand_builtin_sync_synchronize (void)
6535 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6538 static rtx
6539 expand_builtin_thread_pointer (tree exp, rtx target)
6541 enum insn_code icode;
6542 if (!validate_arglist (exp, VOID_TYPE))
6543 return const0_rtx;
6544 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6545 if (icode != CODE_FOR_nothing)
6547 struct expand_operand op;
6548 /* If the target is not suitable then create a new target. */
6549 if (target == NULL_RTX
6550 || !REG_P (target)
6551 || GET_MODE (target) != Pmode)
6552 target = gen_reg_rtx (Pmode);
6553 create_output_operand (&op, target, Pmode);
6554 expand_insn (icode, 1, &op);
6555 return target;
6557 error ("__builtin_thread_pointer is not supported on this target");
6558 return const0_rtx;
6561 static void
6562 expand_builtin_set_thread_pointer (tree exp)
6564 enum insn_code icode;
6565 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6566 return;
6567 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6568 if (icode != CODE_FOR_nothing)
6570 struct expand_operand op;
6571 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6572 Pmode, EXPAND_NORMAL);
6573 create_input_operand (&op, val, Pmode);
6574 expand_insn (icode, 1, &op);
6575 return;
6577 error ("__builtin_set_thread_pointer is not supported on this target");
6581 /* Emit code to restore the current value of the stack. */
6583 static void
6584 expand_stack_restore (tree var)
6586 rtx_insn *prev;
6587 rtx sa = expand_normal (var);
6589 sa = convert_memory_address (Pmode, sa);
6591 prev = get_last_insn ();
6592 emit_stack_restore (SAVE_BLOCK, sa);
6594 record_new_stack_level ();
6596 fixup_args_size_notes (prev, get_last_insn (), 0);
6599 /* Emit code to save the current value of the stack. */
6601 static rtx
6602 expand_stack_save (void)
6604 rtx ret = NULL_RTX;
6606 emit_stack_save (SAVE_BLOCK, &ret);
6607 return ret;
6611 /* Expand an expression EXP that calls a built-in function,
6612 with result going to TARGET if that's convenient
6613 (and in mode MODE if that's convenient).
6614 SUBTARGET may be used as the target for computing one of EXP's operands.
6615 IGNORE is nonzero if the value is to be ignored. */
6617 rtx
6618 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6619 int ignore)
6621 tree fndecl = get_callee_fndecl (exp);
6622 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6623 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6624 int flags;
6626 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6627 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6629 /* When ASan is enabled, we don't want to expand some memory/string
6630 builtins and rely on libsanitizer's hooks. This allows us to avoid
6631 redundant checks and be sure that possible overflows will be detected
6632 by ASan. */
6634 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6635 return expand_call (exp, target, ignore);
6637 /* When not optimizing, generate calls to library functions for a certain
6638 set of builtins. */
6639 if (!optimize
6640 && !called_as_built_in (fndecl)
6641 && fcode != BUILT_IN_FORK
6642 && fcode != BUILT_IN_EXECL
6643 && fcode != BUILT_IN_EXECV
6644 && fcode != BUILT_IN_EXECLP
6645 && fcode != BUILT_IN_EXECLE
6646 && fcode != BUILT_IN_EXECVP
6647 && fcode != BUILT_IN_EXECVE
6648 && !ALLOCA_FUNCTION_CODE_P (fcode)
6649 && fcode != BUILT_IN_FREE
6650 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6651 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6652 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6653 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6654 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6655 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6656 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6657 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6658 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6659 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6660 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6661 && fcode != BUILT_IN_CHKP_BNDRET)
6662 return expand_call (exp, target, ignore);
6664 /* The built-in function expanders test for target == const0_rtx
6665 to determine whether the function's result will be ignored. */
6666 if (ignore)
6667 target = const0_rtx;
6669 /* If the result of a pure or const built-in function is ignored, and
6670 none of its arguments are volatile, we can avoid expanding the
6671 built-in call and just evaluate the arguments for side-effects. */
6672 if (target == const0_rtx
6673 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6674 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6676 bool volatilep = false;
6677 tree arg;
6678 call_expr_arg_iterator iter;
6680 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6681 if (TREE_THIS_VOLATILE (arg))
6683 volatilep = true;
6684 break;
6687 if (! volatilep)
6689 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6690 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6691 return const0_rtx;
6695 /* expand_builtin_with_bounds is supposed to be used for
6696 instrumented builtin calls. */
6697 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6699 switch (fcode)
6701 CASE_FLT_FN (BUILT_IN_FABS):
6702 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6703 case BUILT_IN_FABSD32:
6704 case BUILT_IN_FABSD64:
6705 case BUILT_IN_FABSD128:
6706 target = expand_builtin_fabs (exp, target, subtarget);
6707 if (target)
6708 return target;
6709 break;
6711 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6712 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6713 target = expand_builtin_copysign (exp, target, subtarget);
6714 if (target)
6715 return target;
6716 break;
6718 /* Just do a normal library call if we were unable to fold
6719 the values. */
6720 CASE_FLT_FN (BUILT_IN_CABS):
6721 break;
6723 CASE_FLT_FN (BUILT_IN_FMA):
6724 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6725 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6726 if (target)
6727 return target;
6728 break;
6730 CASE_FLT_FN (BUILT_IN_ILOGB):
6731 if (! flag_unsafe_math_optimizations)
6732 break;
6733 gcc_fallthrough ();
6734 CASE_FLT_FN (BUILT_IN_ISINF):
6735 CASE_FLT_FN (BUILT_IN_FINITE):
6736 case BUILT_IN_ISFINITE:
6737 case BUILT_IN_ISNORMAL:
6738 target = expand_builtin_interclass_mathfn (exp, target);
6739 if (target)
6740 return target;
6741 break;
6743 CASE_FLT_FN (BUILT_IN_ICEIL):
6744 CASE_FLT_FN (BUILT_IN_LCEIL):
6745 CASE_FLT_FN (BUILT_IN_LLCEIL):
6746 CASE_FLT_FN (BUILT_IN_LFLOOR):
6747 CASE_FLT_FN (BUILT_IN_IFLOOR):
6748 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6749 target = expand_builtin_int_roundingfn (exp, target);
6750 if (target)
6751 return target;
6752 break;
6754 CASE_FLT_FN (BUILT_IN_IRINT):
6755 CASE_FLT_FN (BUILT_IN_LRINT):
6756 CASE_FLT_FN (BUILT_IN_LLRINT):
6757 CASE_FLT_FN (BUILT_IN_IROUND):
6758 CASE_FLT_FN (BUILT_IN_LROUND):
6759 CASE_FLT_FN (BUILT_IN_LLROUND):
6760 target = expand_builtin_int_roundingfn_2 (exp, target);
6761 if (target)
6762 return target;
6763 break;
6765 CASE_FLT_FN (BUILT_IN_POWI):
6766 target = expand_builtin_powi (exp, target);
6767 if (target)
6768 return target;
6769 break;
6771 CASE_FLT_FN (BUILT_IN_CEXPI):
6772 target = expand_builtin_cexpi (exp, target);
6773 gcc_assert (target);
6774 return target;
6776 CASE_FLT_FN (BUILT_IN_SIN):
6777 CASE_FLT_FN (BUILT_IN_COS):
6778 if (! flag_unsafe_math_optimizations)
6779 break;
6780 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6781 if (target)
6782 return target;
6783 break;
6785 CASE_FLT_FN (BUILT_IN_SINCOS):
6786 if (! flag_unsafe_math_optimizations)
6787 break;
6788 target = expand_builtin_sincos (exp);
6789 if (target)
6790 return target;
6791 break;
6793 case BUILT_IN_APPLY_ARGS:
6794 return expand_builtin_apply_args ();
6796 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6797 FUNCTION with a copy of the parameters described by
6798 ARGUMENTS, and ARGSIZE. It returns a block of memory
6799 allocated on the stack into which is stored all the registers
6800 that might possibly be used for returning the result of a
6801 function. ARGUMENTS is the value returned by
6802 __builtin_apply_args. ARGSIZE is the number of bytes of
6803 arguments that must be copied. ??? How should this value be
6804 computed? We'll also need a safe worst case value for varargs
6805 functions. */
6806 case BUILT_IN_APPLY:
6807 if (!validate_arglist (exp, POINTER_TYPE,
6808 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6809 && !validate_arglist (exp, REFERENCE_TYPE,
6810 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6811 return const0_rtx;
6812 else
6814 rtx ops[3];
6816 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6817 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6818 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6820 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6823 /* __builtin_return (RESULT) causes the function to return the
6824 value described by RESULT. RESULT is address of the block of
6825 memory returned by __builtin_apply. */
6826 case BUILT_IN_RETURN:
6827 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6828 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6829 return const0_rtx;
6831 case BUILT_IN_SAVEREGS:
6832 return expand_builtin_saveregs ();
6834 case BUILT_IN_VA_ARG_PACK:
6835 /* All valid uses of __builtin_va_arg_pack () are removed during
6836 inlining. */
6837 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6838 return const0_rtx;
6840 case BUILT_IN_VA_ARG_PACK_LEN:
6841 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6842 inlining. */
6843 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6844 return const0_rtx;
6846 /* Return the address of the first anonymous stack arg. */
6847 case BUILT_IN_NEXT_ARG:
6848 if (fold_builtin_next_arg (exp, false))
6849 return const0_rtx;
6850 return expand_builtin_next_arg ();
6852 case BUILT_IN_CLEAR_CACHE:
6853 target = expand_builtin___clear_cache (exp);
6854 if (target)
6855 return target;
6856 break;
6858 case BUILT_IN_CLASSIFY_TYPE:
6859 return expand_builtin_classify_type (exp);
6861 case BUILT_IN_CONSTANT_P:
6862 return const0_rtx;
6864 case BUILT_IN_FRAME_ADDRESS:
6865 case BUILT_IN_RETURN_ADDRESS:
6866 return expand_builtin_frame_address (fndecl, exp);
6868 /* Returns the address of the area where the structure is returned.
6869 0 otherwise. */
6870 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6871 if (call_expr_nargs (exp) != 0
6872 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6873 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6874 return const0_rtx;
6875 else
6876 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6878 CASE_BUILT_IN_ALLOCA:
6879 target = expand_builtin_alloca (exp);
6880 if (target)
6881 return target;
6882 break;
6884 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6885 return expand_asan_emit_allocas_unpoison (exp);
6887 case BUILT_IN_STACK_SAVE:
6888 return expand_stack_save ();
6890 case BUILT_IN_STACK_RESTORE:
6891 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6892 return const0_rtx;
6894 case BUILT_IN_BSWAP16:
6895 case BUILT_IN_BSWAP32:
6896 case BUILT_IN_BSWAP64:
6897 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6898 if (target)
6899 return target;
6900 break;
6902 CASE_INT_FN (BUILT_IN_FFS):
6903 target = expand_builtin_unop (target_mode, exp, target,
6904 subtarget, ffs_optab);
6905 if (target)
6906 return target;
6907 break;
6909 CASE_INT_FN (BUILT_IN_CLZ):
6910 target = expand_builtin_unop (target_mode, exp, target,
6911 subtarget, clz_optab);
6912 if (target)
6913 return target;
6914 break;
6916 CASE_INT_FN (BUILT_IN_CTZ):
6917 target = expand_builtin_unop (target_mode, exp, target,
6918 subtarget, ctz_optab);
6919 if (target)
6920 return target;
6921 break;
6923 CASE_INT_FN (BUILT_IN_CLRSB):
6924 target = expand_builtin_unop (target_mode, exp, target,
6925 subtarget, clrsb_optab);
6926 if (target)
6927 return target;
6928 break;
6930 CASE_INT_FN (BUILT_IN_POPCOUNT):
6931 target = expand_builtin_unop (target_mode, exp, target,
6932 subtarget, popcount_optab);
6933 if (target)
6934 return target;
6935 break;
6937 CASE_INT_FN (BUILT_IN_PARITY):
6938 target = expand_builtin_unop (target_mode, exp, target,
6939 subtarget, parity_optab);
6940 if (target)
6941 return target;
6942 break;
6944 case BUILT_IN_STRLEN:
6945 target = expand_builtin_strlen (exp, target, target_mode);
6946 if (target)
6947 return target;
6948 break;
6950 case BUILT_IN_STRCAT:
6951 target = expand_builtin_strcat (exp, target);
6952 if (target)
6953 return target;
6954 break;
6956 case BUILT_IN_STRCPY:
6957 target = expand_builtin_strcpy (exp, target);
6958 if (target)
6959 return target;
6960 break;
6962 case BUILT_IN_STRNCAT:
6963 target = expand_builtin_strncat (exp, target);
6964 if (target)
6965 return target;
6966 break;
6968 case BUILT_IN_STRNCPY:
6969 target = expand_builtin_strncpy (exp, target);
6970 if (target)
6971 return target;
6972 break;
6974 case BUILT_IN_STPCPY:
6975 target = expand_builtin_stpcpy (exp, target, mode);
6976 if (target)
6977 return target;
6978 break;
6980 case BUILT_IN_STPNCPY:
6981 target = expand_builtin_stpncpy (exp, target);
6982 if (target)
6983 return target;
6984 break;
6986 case BUILT_IN_MEMCHR:
6987 target = expand_builtin_memchr (exp, target);
6988 if (target)
6989 return target;
6990 break;
6992 case BUILT_IN_MEMCPY:
6993 target = expand_builtin_memcpy (exp, target);
6994 if (target)
6995 return target;
6996 break;
6998 case BUILT_IN_MEMMOVE:
6999 target = expand_builtin_memmove (exp, target);
7000 if (target)
7001 return target;
7002 break;
7004 case BUILT_IN_MEMPCPY:
7005 target = expand_builtin_mempcpy (exp, target);
7006 if (target)
7007 return target;
7008 break;
7010 case BUILT_IN_MEMSET:
7011 target = expand_builtin_memset (exp, target, mode);
7012 if (target)
7013 return target;
7014 break;
7016 case BUILT_IN_BZERO:
7017 target = expand_builtin_bzero (exp);
7018 if (target)
7019 return target;
7020 break;
7022 case BUILT_IN_STRCMP:
7023 target = expand_builtin_strcmp (exp, target);
7024 if (target)
7025 return target;
7026 break;
7028 case BUILT_IN_STRNCMP:
7029 target = expand_builtin_strncmp (exp, target, mode);
7030 if (target)
7031 return target;
7032 break;
7034 case BUILT_IN_BCMP:
7035 case BUILT_IN_MEMCMP:
7036 case BUILT_IN_MEMCMP_EQ:
7037 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7038 if (target)
7039 return target;
7040 if (fcode == BUILT_IN_MEMCMP_EQ)
7042 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7043 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7045 break;
7047 case BUILT_IN_SETJMP:
7048 /* This should have been lowered to the builtins below. */
7049 gcc_unreachable ();
7051 case BUILT_IN_SETJMP_SETUP:
7052 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7053 and the receiver label. */
7054 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7056 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7057 VOIDmode, EXPAND_NORMAL);
7058 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7059 rtx_insn *label_r = label_rtx (label);
7061 /* This is copied from the handling of non-local gotos. */
7062 expand_builtin_setjmp_setup (buf_addr, label_r);
7063 nonlocal_goto_handler_labels
7064 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7065 nonlocal_goto_handler_labels);
7066 /* ??? Do not let expand_label treat us as such since we would
7067 not want to be both on the list of non-local labels and on
7068 the list of forced labels. */
7069 FORCED_LABEL (label) = 0;
7070 return const0_rtx;
7072 break;
7074 case BUILT_IN_SETJMP_RECEIVER:
7075 /* __builtin_setjmp_receiver is passed the receiver label. */
7076 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7078 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7079 rtx_insn *label_r = label_rtx (label);
7081 expand_builtin_setjmp_receiver (label_r);
7082 return const0_rtx;
7084 break;
7086 /* __builtin_longjmp is passed a pointer to an array of five words.
7087 It's similar to the C library longjmp function but works with
7088 __builtin_setjmp above. */
7089 case BUILT_IN_LONGJMP:
7090 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7092 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7093 VOIDmode, EXPAND_NORMAL);
7094 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7096 if (value != const1_rtx)
7098 error ("%<__builtin_longjmp%> second argument must be 1");
7099 return const0_rtx;
7102 expand_builtin_longjmp (buf_addr, value);
7103 return const0_rtx;
7105 break;
7107 case BUILT_IN_NONLOCAL_GOTO:
7108 target = expand_builtin_nonlocal_goto (exp);
7109 if (target)
7110 return target;
7111 break;
7113 /* This updates the setjmp buffer that is its argument with the value
7114 of the current stack pointer. */
7115 case BUILT_IN_UPDATE_SETJMP_BUF:
7116 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7118 rtx buf_addr
7119 = expand_normal (CALL_EXPR_ARG (exp, 0));
7121 expand_builtin_update_setjmp_buf (buf_addr);
7122 return const0_rtx;
7124 break;
7126 case BUILT_IN_TRAP:
7127 expand_builtin_trap ();
7128 return const0_rtx;
7130 case BUILT_IN_UNREACHABLE:
7131 expand_builtin_unreachable ();
7132 return const0_rtx;
7134 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7135 case BUILT_IN_SIGNBITD32:
7136 case BUILT_IN_SIGNBITD64:
7137 case BUILT_IN_SIGNBITD128:
7138 target = expand_builtin_signbit (exp, target);
7139 if (target)
7140 return target;
7141 break;
7143 /* Various hooks for the DWARF 2 __throw routine. */
7144 case BUILT_IN_UNWIND_INIT:
7145 expand_builtin_unwind_init ();
7146 return const0_rtx;
7147 case BUILT_IN_DWARF_CFA:
7148 return virtual_cfa_rtx;
7149 #ifdef DWARF2_UNWIND_INFO
7150 case BUILT_IN_DWARF_SP_COLUMN:
7151 return expand_builtin_dwarf_sp_column ();
7152 case BUILT_IN_INIT_DWARF_REG_SIZES:
7153 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7154 return const0_rtx;
7155 #endif
7156 case BUILT_IN_FROB_RETURN_ADDR:
7157 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7158 case BUILT_IN_EXTRACT_RETURN_ADDR:
7159 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7160 case BUILT_IN_EH_RETURN:
7161 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7162 CALL_EXPR_ARG (exp, 1));
7163 return const0_rtx;
7164 case BUILT_IN_EH_RETURN_DATA_REGNO:
7165 return expand_builtin_eh_return_data_regno (exp);
7166 case BUILT_IN_EXTEND_POINTER:
7167 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7168 case BUILT_IN_EH_POINTER:
7169 return expand_builtin_eh_pointer (exp);
7170 case BUILT_IN_EH_FILTER:
7171 return expand_builtin_eh_filter (exp);
7172 case BUILT_IN_EH_COPY_VALUES:
7173 return expand_builtin_eh_copy_values (exp);
7175 case BUILT_IN_VA_START:
7176 return expand_builtin_va_start (exp);
7177 case BUILT_IN_VA_END:
7178 return expand_builtin_va_end (exp);
7179 case BUILT_IN_VA_COPY:
7180 return expand_builtin_va_copy (exp);
7181 case BUILT_IN_EXPECT:
7182 return expand_builtin_expect (exp, target);
7183 case BUILT_IN_ASSUME_ALIGNED:
7184 return expand_builtin_assume_aligned (exp, target);
7185 case BUILT_IN_PREFETCH:
7186 expand_builtin_prefetch (exp);
7187 return const0_rtx;
7189 case BUILT_IN_INIT_TRAMPOLINE:
7190 return expand_builtin_init_trampoline (exp, true);
7191 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7192 return expand_builtin_init_trampoline (exp, false);
7193 case BUILT_IN_ADJUST_TRAMPOLINE:
7194 return expand_builtin_adjust_trampoline (exp);
7196 case BUILT_IN_INIT_DESCRIPTOR:
7197 return expand_builtin_init_descriptor (exp);
7198 case BUILT_IN_ADJUST_DESCRIPTOR:
7199 return expand_builtin_adjust_descriptor (exp);
7201 case BUILT_IN_FORK:
7202 case BUILT_IN_EXECL:
7203 case BUILT_IN_EXECV:
7204 case BUILT_IN_EXECLP:
7205 case BUILT_IN_EXECLE:
7206 case BUILT_IN_EXECVP:
7207 case BUILT_IN_EXECVE:
7208 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7214 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7215 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7216 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7217 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7218 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7219 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7220 if (target)
7221 return target;
7222 break;
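/* For illustration only: the _1/_2/_4/_8/_16 suffix encodes the operand
   size in bytes, so get_builtin_sync_mode maps the offset from the _1
   enumerator (0..4) to the 1/2/4/8/16-byte integer mode.  Assuming a
   4-byte int:

     int counter;
     int old = __sync_fetch_and_add (&counter, 1);  // resolves to the _4
                                                    // variant; offset 2,
                                                    // hence SImode
*/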
7224 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7225 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7226 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7227 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7228 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7229 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7230 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7231 if (target)
7232 return target;
7233 break;
7235 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7236 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7237 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7238 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7239 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7240 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7241 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7242 if (target)
7243 return target;
7244 break;
7246 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7247 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7248 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7249 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7250 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7252 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7253 if (target)
7254 return target;
7255 break;
7257 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7258 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7259 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7260 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7261 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7262 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7263 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7264 if (target)
7265 return target;
7266 break;
7268 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7269 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7270 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7271 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7272 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7273 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7274 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7275 if (target)
7276 return target;
7277 break;
7279 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7280 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7281 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7282 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7283 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7284 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7285 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7286 if (target)
7287 return target;
7288 break;
7290 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7291 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7292 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7293 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7294 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7295 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7296 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7297 if (target)
7298 return target;
7299 break;
7301 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7302 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7303 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7304 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7305 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7306 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7307 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7308 if (target)
7309 return target;
7310 break;
7312 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7313 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7314 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7315 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7316 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7317 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7318 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7319 if (target)
7320 return target;
7321 break;
7323 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7324 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7325 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7326 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7327 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7328 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7329 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7330 if (target)
7331 return target;
7332 break;
7334 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7335 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7336 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7337 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7338 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7339 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7340 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7341 if (target)
7342 return target;
7343 break;
7345 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7346 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7347 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7348 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7349 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7350 if (mode == VOIDmode)
7351 mode = TYPE_MODE (boolean_type_node);
7352 if (!target || !register_operand (target, mode))
7353 target = gen_reg_rtx (mode);
7355 mode = get_builtin_sync_mode
7356 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7357 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7358 if (target)
7359 return target;
7360 break;
7362 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7363 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7364 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7365 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7366 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7367 mode = get_builtin_sync_mode
7368 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7369 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7370 if (target)
7371 return target;
7372 break;
7374 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7375 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7376 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7377 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7378 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7379 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7380 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7381 if (target)
7382 return target;
7383 break;
7385 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7386 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7387 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7388 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7389 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7390 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7391 expand_builtin_sync_lock_release (mode, exp);
7392 return const0_rtx;
7394 case BUILT_IN_SYNC_SYNCHRONIZE:
7395 expand_builtin_sync_synchronize ();
7396 return const0_rtx;
7398 case BUILT_IN_ATOMIC_EXCHANGE_1:
7399 case BUILT_IN_ATOMIC_EXCHANGE_2:
7400 case BUILT_IN_ATOMIC_EXCHANGE_4:
7401 case BUILT_IN_ATOMIC_EXCHANGE_8:
7402 case BUILT_IN_ATOMIC_EXCHANGE_16:
7403 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7404 target = expand_builtin_atomic_exchange (mode, exp, target);
7405 if (target)
7406 return target;
7407 break;
7409 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7410 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7411 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7412 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7413 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7415 unsigned int nargs, z;
7416 vec<tree, va_gc> *vec;
7418 mode =
7419 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7420 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7421 if (target)
7422 return target;
7424 /* If this is turned into an external library call, the weak parameter
7425 must be dropped to match the expected parameter list. */
7426 nargs = call_expr_nargs (exp);
7427 vec_alloc (vec, nargs - 1);
7428 for (z = 0; z < 3; z++)
7429 vec->quick_push (CALL_EXPR_ARG (exp, z));
7430 /* Skip the boolean weak parameter. */
7431 for (z = 4; z < 6; z++)
7432 vec->quick_push (CALL_EXPR_ARG (exp, z));
7433 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7434 break;
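/* For illustration only, a schematic of the rewriting above: the
   user-level builtin carries a 'weak' flag that the library entry
   point lacks, so

     builtin:  (ptr, expected, desired, weak, success_mo, failure_mo)
     library:  (ptr, expected, desired,       success_mo, failure_mo)

   i.e. arguments 0-2 are kept, argument 3 (weak) is dropped, and
   arguments 4-5 are kept, matching the two push loops.  */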
7437 case BUILT_IN_ATOMIC_LOAD_1:
7438 case BUILT_IN_ATOMIC_LOAD_2:
7439 case BUILT_IN_ATOMIC_LOAD_4:
7440 case BUILT_IN_ATOMIC_LOAD_8:
7441 case BUILT_IN_ATOMIC_LOAD_16:
7442 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7443 target = expand_builtin_atomic_load (mode, exp, target);
7444 if (target)
7445 return target;
7446 break;
7448 case BUILT_IN_ATOMIC_STORE_1:
7449 case BUILT_IN_ATOMIC_STORE_2:
7450 case BUILT_IN_ATOMIC_STORE_4:
7451 case BUILT_IN_ATOMIC_STORE_8:
7452 case BUILT_IN_ATOMIC_STORE_16:
7453 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7454 target = expand_builtin_atomic_store (mode, exp);
7455 if (target)
7456 return const0_rtx;
7457 break;
7459 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7460 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7461 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7462 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7463 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7465 enum built_in_function lib;
7466 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7467 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7468 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7469 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7470 ignore, lib);
7471 if (target)
7472 return target;
7473 break;
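/* For illustration only: when no instruction pattern exists, the
   op-then-fetch form is emulated via the fetch-then-op library
   function named by LIB, applying the operation once more to the
   returned value; for addition this is the identity

     __atomic_add_fetch (p, v, mo)  ==  __atomic_fetch_add (p, v, mo) + v
*/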
7475 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7476 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7477 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7478 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7479 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7481 enum built_in_function lib;
7482 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7483 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7484 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7485 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7486 ignore, lib);
7487 if (target)
7488 return target;
7489 break;
7491 case BUILT_IN_ATOMIC_AND_FETCH_1:
7492 case BUILT_IN_ATOMIC_AND_FETCH_2:
7493 case BUILT_IN_ATOMIC_AND_FETCH_4:
7494 case BUILT_IN_ATOMIC_AND_FETCH_8:
7495 case BUILT_IN_ATOMIC_AND_FETCH_16:
7497 enum built_in_function lib;
7498 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7499 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7500 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7501 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7502 ignore, lib);
7503 if (target)
7504 return target;
7505 break;
7507 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7508 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7509 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7510 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7511 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7513 enum built_in_function lib;
7514 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7515 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7516 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7517 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7518 ignore, lib);
7519 if (target)
7520 return target;
7521 break;
7523 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7524 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7525 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7526 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7527 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7529 enum built_in_function lib;
7530 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7531 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7532 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7533 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7534 ignore, lib);
7535 if (target)
7536 return target;
7537 break;
7539 case BUILT_IN_ATOMIC_OR_FETCH_1:
7540 case BUILT_IN_ATOMIC_OR_FETCH_2:
7541 case BUILT_IN_ATOMIC_OR_FETCH_4:
7542 case BUILT_IN_ATOMIC_OR_FETCH_8:
7543 case BUILT_IN_ATOMIC_OR_FETCH_16:
7545 enum built_in_function lib;
7546 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7547 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7548 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7549 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7550 ignore, lib);
7551 if (target)
7552 return target;
7553 break;
7555 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7556 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7557 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7558 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7559 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7560 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7561 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7562 ignore, BUILT_IN_NONE);
7563 if (target)
7564 return target;
7565 break;
7567 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7568 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7569 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7570 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7571 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7572 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7573 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7574 ignore, BUILT_IN_NONE);
7575 if (target)
7576 return target;
7577 break;
7579 case BUILT_IN_ATOMIC_FETCH_AND_1:
7580 case BUILT_IN_ATOMIC_FETCH_AND_2:
7581 case BUILT_IN_ATOMIC_FETCH_AND_4:
7582 case BUILT_IN_ATOMIC_FETCH_AND_8:
7583 case BUILT_IN_ATOMIC_FETCH_AND_16:
7584 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7585 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7586 ignore, BUILT_IN_NONE);
7587 if (target)
7588 return target;
7589 break;
7591 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7592 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7593 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7594 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7595 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7596 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7597 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7598 ignore, BUILT_IN_NONE);
7599 if (target)
7600 return target;
7601 break;
7603 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7604 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7605 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7606 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7607 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7608 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7609 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7610 ignore, BUILT_IN_NONE);
7611 if (target)
7612 return target;
7613 break;
7615 case BUILT_IN_ATOMIC_FETCH_OR_1:
7616 case BUILT_IN_ATOMIC_FETCH_OR_2:
7617 case BUILT_IN_ATOMIC_FETCH_OR_4:
7618 case BUILT_IN_ATOMIC_FETCH_OR_8:
7619 case BUILT_IN_ATOMIC_FETCH_OR_16:
7620 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7621 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7622 ignore, BUILT_IN_NONE);
7623 if (target)
7624 return target;
7625 break;
7627 case BUILT_IN_ATOMIC_TEST_AND_SET:
7628 return expand_builtin_atomic_test_and_set (exp, target);
7630 case BUILT_IN_ATOMIC_CLEAR:
7631 return expand_builtin_atomic_clear (exp);
7633 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7634 return expand_builtin_atomic_always_lock_free (exp);
7636 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7637 target = expand_builtin_atomic_is_lock_free (exp);
7638 if (target)
7639 return target;
7640 break;
7642 case BUILT_IN_ATOMIC_THREAD_FENCE:
7643 expand_builtin_atomic_thread_fence (exp);
7644 return const0_rtx;
7646 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7647 expand_builtin_atomic_signal_fence (exp);
7648 return const0_rtx;
7650 case BUILT_IN_OBJECT_SIZE:
7651 return expand_builtin_object_size (exp);
7653 case BUILT_IN_MEMCPY_CHK:
7654 case BUILT_IN_MEMPCPY_CHK:
7655 case BUILT_IN_MEMMOVE_CHK:
7656 case BUILT_IN_MEMSET_CHK:
7657 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7658 if (target)
7659 return target;
7660 break;
7662 case BUILT_IN_STRCPY_CHK:
7663 case BUILT_IN_STPCPY_CHK:
7664 case BUILT_IN_STRNCPY_CHK:
7665 case BUILT_IN_STPNCPY_CHK:
7666 case BUILT_IN_STRCAT_CHK:
7667 case BUILT_IN_STRNCAT_CHK:
7668 case BUILT_IN_SNPRINTF_CHK:
7669 case BUILT_IN_VSNPRINTF_CHK:
7670 maybe_emit_chk_warning (exp, fcode);
7671 break;
7673 case BUILT_IN_SPRINTF_CHK:
7674 case BUILT_IN_VSPRINTF_CHK:
7675 maybe_emit_sprintf_chk_warning (exp, fcode);
7676 break;
7678 case BUILT_IN_FREE:
7679 if (warn_free_nonheap_object)
7680 maybe_emit_free_warning (exp);
7681 break;
7683 case BUILT_IN_THREAD_POINTER:
7684 return expand_builtin_thread_pointer (exp, target);
7686 case BUILT_IN_SET_THREAD_POINTER:
7687 expand_builtin_set_thread_pointer (exp);
7688 return const0_rtx;
7690 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7691 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7692 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7693 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7694 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7695 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7696 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7697 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7698 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7699 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7700 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7701 /* We allow user CHKP builtins if the Pointer Bounds
7702 Checker is off. */
7703 if (!chkp_function_instrumented_p (current_function_decl))
7705 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7706 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7707 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7708 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7709 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7710 return expand_normal (CALL_EXPR_ARG (exp, 0));
7711 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7712 return expand_normal (size_zero_node);
7713 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7714 return expand_normal (size_int (-1));
7715 else
7716 return const0_rtx;
7718 /* FALLTHROUGH */
7720 case BUILT_IN_CHKP_BNDMK:
7721 case BUILT_IN_CHKP_BNDSTX:
7722 case BUILT_IN_CHKP_BNDCL:
7723 case BUILT_IN_CHKP_BNDCU:
7724 case BUILT_IN_CHKP_BNDLDX:
7725 case BUILT_IN_CHKP_BNDRET:
7726 case BUILT_IN_CHKP_INTERSECT:
7727 case BUILT_IN_CHKP_NARROW:
7728 case BUILT_IN_CHKP_EXTRACT_LOWER:
7729 case BUILT_IN_CHKP_EXTRACT_UPPER:
7730 /* A software-only implementation of the Pointer Bounds Checker is
7731 not yet implemented; target support is required. */
7732 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7733 break;
7735 case BUILT_IN_ACC_ON_DEVICE:
7736 /* Do a library call if we failed to expand the builtin when
7737 folding. */
7738 break;
7740 default: /* just do library call, if unknown builtin */
7741 break;
7744 /* The switch statement above can drop through to cause the function
7745 to be called normally. */
7746 return expand_call (exp, target, ignore);
7749 /* Similar to expand_builtin but is used for instrumented calls. */
7751 rtx
7752 expand_builtin_with_bounds (tree exp, rtx target,
7753 rtx subtarget ATTRIBUTE_UNUSED,
7754 machine_mode mode, int ignore)
7756 tree fndecl = get_callee_fndecl (exp);
7757 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7759 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7761 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7762 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7764 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7765 && fcode < END_CHKP_BUILTINS);
7767 switch (fcode)
7769 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7770 target = expand_builtin_memcpy_with_bounds (exp, target);
7771 if (target)
7772 return target;
7773 break;
7775 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7776 target = expand_builtin_mempcpy_with_bounds (exp, target);
7777 if (target)
7778 return target;
7779 break;
7781 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7782 target = expand_builtin_memset_with_bounds (exp, target, mode);
7783 if (target)
7784 return target;
7785 break;
7787 case BUILT_IN_MEMCPY_CHKP:
7788 case BUILT_IN_MEMMOVE_CHKP:
7789 case BUILT_IN_MEMPCPY_CHKP:
7790 if (call_expr_nargs (exp) > 3)
7792 /* memcpy_chkp (void *dst, size_t dstbnd,
7793 const void *src, size_t srcbnd, size_t n)
7794 and others take a pointer bound argument just after each
7795 pointer argument. */
7796 tree dest = CALL_EXPR_ARG (exp, 0);
7797 tree src = CALL_EXPR_ARG (exp, 2);
7798 tree len = CALL_EXPR_ARG (exp, 4);
7800 check_memop_access (exp, dest, src, len);
7801 break;
7804 default:
7805 break;
7808 /* The switch statement above can drop through to cause the function
7809 to be called normally. */
7810 return expand_call (exp, target, ignore);
7813 /* Determine whether a tree node represents a call to a built-in
7814 function. If the tree T is a call to a built-in function with
7815 the right number of arguments of the appropriate types, return
7816 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7817 Otherwise the return value is END_BUILTINS. */
7819 enum built_in_function
7820 builtin_mathfn_code (const_tree t)
7822 const_tree fndecl, arg, parmlist;
7823 const_tree argtype, parmtype;
7824 const_call_expr_arg_iterator iter;
7826 if (TREE_CODE (t) != CALL_EXPR
7827 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7828 return END_BUILTINS;
7830 fndecl = get_callee_fndecl (t);
7831 if (fndecl == NULL_TREE
7832 || TREE_CODE (fndecl) != FUNCTION_DECL
7833 || ! DECL_BUILT_IN (fndecl)
7834 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7835 return END_BUILTINS;
7837 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7838 init_const_call_expr_arg_iterator (t, &iter);
7839 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7841 /* If a function doesn't take a variable number of arguments,
7842 the last element in the list will have type `void'. */
7843 parmtype = TREE_VALUE (parmlist);
7844 if (VOID_TYPE_P (parmtype))
7846 if (more_const_call_expr_args_p (&iter))
7847 return END_BUILTINS;
7848 return DECL_FUNCTION_CODE (fndecl);
7851 if (! more_const_call_expr_args_p (&iter))
7852 return END_BUILTINS;
7854 arg = next_const_call_expr_arg (&iter);
7855 argtype = TREE_TYPE (arg);
7857 if (SCALAR_FLOAT_TYPE_P (parmtype))
7859 if (! SCALAR_FLOAT_TYPE_P (argtype))
7860 return END_BUILTINS;
7862 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7864 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7865 return END_BUILTINS;
7867 else if (POINTER_TYPE_P (parmtype))
7869 if (! POINTER_TYPE_P (argtype))
7870 return END_BUILTINS;
7872 else if (INTEGRAL_TYPE_P (parmtype))
7874 if (! INTEGRAL_TYPE_P (argtype))
7875 return END_BUILTINS;
7877 else
7878 return END_BUILTINS;
7881 /* Variable-length argument list. */
7882 return DECL_FUNCTION_CODE (fndecl);
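/* For illustration only: the loop above walks parameter types and call
   arguments in lock-step, so both the argument count and the type
   classes have to match, e.g.

     pow (2.0, 3.0)   // two REAL args for two REAL parms
                      //   => BUILT_IN_POW
     pow (2.0)        // a malformed call: the arguments run out before
                      //   the void terminator => END_BUILTINS
*/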
7885 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7886 evaluate to a constant. */
7888 static tree
7889 fold_builtin_constant_p (tree arg)
7891 /* We return 1 for a numeric type that's known to be a constant
7892 value at compile-time or for an aggregate type that's a
7893 literal constant. */
7894 STRIP_NOPS (arg);
7896 /* If we know this is a constant, return the constant 1. */
7897 if (CONSTANT_CLASS_P (arg)
7898 || (TREE_CODE (arg) == CONSTRUCTOR
7899 && TREE_CONSTANT (arg)))
7900 return integer_one_node;
7901 if (TREE_CODE (arg) == ADDR_EXPR)
7903 tree op = TREE_OPERAND (arg, 0);
7904 if (TREE_CODE (op) == STRING_CST
7905 || (TREE_CODE (op) == ARRAY_REF
7906 && integer_zerop (TREE_OPERAND (op, 1))
7907 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7908 return integer_one_node;
7911 /* If this expression has side effects, show we don't know it to be a
7912 constant. Likewise if it's a pointer or aggregate type, since in
7913 those cases we only want literals, as those are only optimized
7914 when generating RTL, not later.
7915 And finally, if we are compiling an initializer, not code, we
7916 need to return a definite result now; there's not going to be any
7917 more optimization done. */
7918 if (TREE_SIDE_EFFECTS (arg)
7919 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7920 || POINTER_TYPE_P (TREE_TYPE (arg))
7921 || cfun == 0
7922 || folding_initializer
7923 || force_folding_builtin_constant_p)
7924 return integer_zero_node;
7926 return NULL_TREE;
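/* For illustration only, some inputs and their folds under the rules
   above (assuming ordinary code, not an initializer):

     __builtin_constant_p (42)     // 1: CONSTANT_CLASS_P
     __builtin_constant_p ("abc")  // 1: ADDR_EXPR of a STRING_CST
     __builtin_constant_p (x++)    // 0: TREE_SIDE_EFFECTS
     __builtin_constant_p (x + y)  // deferred (NULL_TREE), hoping later
                                   // optimization proves it constant
*/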
7929 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7930 return it as a truthvalue. */
7932 static tree
7933 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7934 tree predictor)
7936 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7938 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7939 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7940 ret_type = TREE_TYPE (TREE_TYPE (fn));
7941 pred_type = TREE_VALUE (arg_types);
7942 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7944 pred = fold_convert_loc (loc, pred_type, pred);
7945 expected = fold_convert_loc (loc, expected_type, expected);
7946 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7947 predictor);
7949 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7950 build_int_cst (ret_type, 0));
7953 /* Fold a call to builtin_expect with arguments ARG0 and ARG1, plus an
7954 optional predictor ARG2. Return NULL_TREE if no simplification is possible. */
7956 tree
7957 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7959 tree inner, fndecl, inner_arg0;
7960 enum tree_code code;
7962 /* Distribute the expected value over short-circuiting operators.
7963 See through the cast from truthvalue_type_node to long. */
7964 inner_arg0 = arg0;
7965 while (CONVERT_EXPR_P (inner_arg0)
7966 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7967 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7968 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7970 /* If this is a builtin_expect within a builtin_expect keep the
7971 inner one. See through a comparison against a constant. It
7972 might have been added to create a truthvalue. */
7973 inner = inner_arg0;
7975 if (COMPARISON_CLASS_P (inner)
7976 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7977 inner = TREE_OPERAND (inner, 0);
7979 if (TREE_CODE (inner) == CALL_EXPR
7980 && (fndecl = get_callee_fndecl (inner))
7981 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7982 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7983 return arg0;
7985 inner = inner_arg0;
7986 code = TREE_CODE (inner);
7987 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7989 tree op0 = TREE_OPERAND (inner, 0);
7990 tree op1 = TREE_OPERAND (inner, 1);
7992 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7993 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7994 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7996 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
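/* For illustration only, the distribution performed above:

     __builtin_expect (a && b, 1)
   becomes
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the expectation reaches both short-circuit operands.  */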
7999 /* If the argument isn't invariant then there's nothing else we can do. */
8000 if (!TREE_CONSTANT (inner_arg0))
8001 return NULL_TREE;
8003 /* If we expect that a comparison against the argument will fold to
8004 a constant return the constant. In practice, this means a true
8005 constant or the address of a non-weak symbol. */
8006 inner = inner_arg0;
8007 STRIP_NOPS (inner);
8008 if (TREE_CODE (inner) == ADDR_EXPR)
8012 inner = TREE_OPERAND (inner, 0);
8014 while (TREE_CODE (inner) == COMPONENT_REF
8015 || TREE_CODE (inner) == ARRAY_REF);
8016 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8017 return NULL_TREE;
8020 /* Otherwise, ARG0 already has the proper type for the return value. */
8021 return arg0;
8024 /* Fold a call to __builtin_classify_type with argument ARG. */
8026 static tree
8027 fold_builtin_classify_type (tree arg)
8029 if (arg == 0)
8030 return build_int_cst (integer_type_node, no_type_class);
8032 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8035 /* Fold a call to __builtin_strlen with argument ARG. */
8037 static tree
8038 fold_builtin_strlen (location_t loc, tree type, tree arg)
8040 if (!validate_arg (arg, POINTER_TYPE))
8041 return NULL_TREE;
8042 else
8044 tree len = c_strlen (arg, 0);
8046 if (len)
8047 return fold_convert_loc (loc, type, len);
8049 return NULL_TREE;
8053 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8055 static tree
8056 fold_builtin_inf (location_t loc, tree type, int warn)
8058 REAL_VALUE_TYPE real;
8060 /* __builtin_inff is intended to be usable to define INFINITY on all
8061 targets. If an infinity is not available, INFINITY expands "to a
8062 positive constant of type float that overflows at translation
8063 time", footnote "In this case, using INFINITY will violate the
8064 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8065 Thus we pedwarn to ensure this constraint violation is
8066 diagnosed. */
8067 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8068 pedwarn (loc, 0, "target format does not support infinity");
8070 real_inf (&real);
8071 return build_real (type, real);
8074 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8075 NULL_TREE if no simplification can be made. */
8077 static tree
8078 fold_builtin_sincos (location_t loc,
8079 tree arg0, tree arg1, tree arg2)
8081 tree type;
8082 tree fndecl, call = NULL_TREE;
8084 if (!validate_arg (arg0, REAL_TYPE)
8085 || !validate_arg (arg1, POINTER_TYPE)
8086 || !validate_arg (arg2, POINTER_TYPE))
8087 return NULL_TREE;
8089 type = TREE_TYPE (arg0);
8091 /* Calculate the result when the argument is a constant. */
8092 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8093 if (fn == END_BUILTINS)
8094 return NULL_TREE;
8096 /* Canonicalize sincos to cexpi. */
8097 if (TREE_CODE (arg0) == REAL_CST)
8099 tree complex_type = build_complex_type (type);
8100 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8102 if (!call)
8104 if (!targetm.libc_has_function (function_c99_math_complex)
8105 || !builtin_decl_implicit_p (fn))
8106 return NULL_TREE;
8107 fndecl = builtin_decl_explicit (fn);
8108 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8109 call = builtin_save_expr (call);
8112 return build2 (COMPOUND_EXPR, void_type_node,
8113 build2 (MODIFY_EXPR, void_type_node,
8114 build_fold_indirect_ref_loc (loc, arg1),
8115 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8116 build2 (MODIFY_EXPR, void_type_node,
8117 build_fold_indirect_ref_loc (loc, arg2),
8118 fold_build1_loc (loc, REALPART_EXPR, type, call)));
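/* For illustration only: since cexpi (x) computes cos (x) + i*sin (x),
   the fold above turns

     sincos (x, &s, &c);
   into the equivalent of
     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   where tmp is a hypothetical name for the saved call result.  */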
8121 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
8122 Return NULL_TREE if no simplification can be made. */
8124 static tree
8125 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8127 if (!validate_arg (arg1, POINTER_TYPE)
8128 || !validate_arg (arg2, POINTER_TYPE)
8129 || !validate_arg (len, INTEGER_TYPE))
8130 return NULL_TREE;
8132 /* If the LEN parameter is zero, return zero. */
8133 if (integer_zerop (len))
8134 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8135 arg1, arg2);
8137 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8138 if (operand_equal_p (arg1, arg2, 0))
8139 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8141 /* If the LEN parameter is one, return an expression corresponding to
8142 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8143 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8145 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8146 tree cst_uchar_ptr_node
8147 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8149 tree ind1
8150 = fold_convert_loc (loc, integer_type_node,
8151 build1 (INDIRECT_REF, cst_uchar_node,
8152 fold_convert_loc (loc,
8153 cst_uchar_ptr_node,
8154 arg1)));
8155 tree ind2
8156 = fold_convert_loc (loc, integer_type_node,
8157 build1 (INDIRECT_REF, cst_uchar_node,
8158 fold_convert_loc (loc,
8159 cst_uchar_ptr_node,
8160 arg2)));
8161 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8164 return NULL_TREE;
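/* For illustration only, the single-byte case above folds

     memcmp (p, q, 1)
   into
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q
*/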
8167 /* Fold a call to builtin isascii with argument ARG. */
8169 static tree
8170 fold_builtin_isascii (location_t loc, tree arg)
8172 if (!validate_arg (arg, INTEGER_TYPE))
8173 return NULL_TREE;
8174 else
8176 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8177 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8178 build_int_cst (integer_type_node,
8179 ~ (unsigned HOST_WIDE_INT) 0x7f));
8180 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8181 arg, integer_zero_node);
8185 /* Fold a call to builtin toascii with argument ARG. */
8187 static tree
8188 fold_builtin_toascii (location_t loc, tree arg)
8190 if (!validate_arg (arg, INTEGER_TYPE))
8191 return NULL_TREE;
8193 /* Transform toascii(c) -> (c & 0x7f). */
8194 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8195 build_int_cst (integer_type_node, 0x7f));
8198 /* Fold a call to builtin isdigit with argument ARG. */
8200 static tree
8201 fold_builtin_isdigit (location_t loc, tree arg)
8203 if (!validate_arg (arg, INTEGER_TYPE))
8204 return NULL_TREE;
8205 else
8207 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8208 /* According to the C standard, isdigit is unaffected by locale.
8209 However, it definitely is affected by the target character set. */
8210 unsigned HOST_WIDE_INT target_digit0
8211 = lang_hooks.to_target_charset ('0');
8213 if (target_digit0 == 0)
8214 return NULL_TREE;
8216 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8217 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8218 build_int_cst (unsigned_type_node, target_digit0));
8219 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8220 build_int_cst (unsigned_type_node, 9));
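/* For illustration only: the unsigned subtraction turns the two-sided
   range test into a single comparison; for c below '0' the difference
   wraps around to a huge value, so

     (unsigned) c - '0' <= 9

   holds exactly when '0' <= c && c <= '9'.  */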
8224 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8226 static tree
8227 fold_builtin_fabs (location_t loc, tree arg, tree type)
8229 if (!validate_arg (arg, REAL_TYPE))
8230 return NULL_TREE;
8232 arg = fold_convert_loc (loc, type, arg);
8233 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8236 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8238 static tree
8239 fold_builtin_abs (location_t loc, tree arg, tree type)
8241 if (!validate_arg (arg, INTEGER_TYPE))
8242 return NULL_TREE;
8244 arg = fold_convert_loc (loc, type, arg);
8245 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8248 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8250 static tree
8251 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8253 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8254 if (validate_arg (arg0, REAL_TYPE)
8255 && validate_arg (arg1, REAL_TYPE)
8256 && validate_arg (arg2, REAL_TYPE)
8257 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8258 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8260 return NULL_TREE;
8263 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8265 static tree
8266 fold_builtin_carg (location_t loc, tree arg, tree type)
8268 if (validate_arg (arg, COMPLEX_TYPE)
8269 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8271 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8273 if (atan2_fn)
8275 tree new_arg = builtin_save_expr (arg);
8276 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8277 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8278 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8282 return NULL_TREE;
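/* For illustration only: for z = a + b*i the fold above yields

     carg (z)  ->  atan2 (b, a)   // atan2 (__imag__ z, __real__ z)

   with z wrapped in a SAVE_EXPR so it is evaluated only once.  */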
8285 /* Fold a call to builtin frexp; we can assume the base is 2. */
8287 static tree
8288 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8290 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8291 return NULL_TREE;
8293 STRIP_NOPS (arg0);
8295 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8296 return NULL_TREE;
8298 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8300 /* Proceed if a valid pointer type was passed in. */
8301 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8303 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8304 tree frac, exp;
8306 switch (value->cl)
8308 case rvc_zero:
8309 /* For +-0, return (*exp = 0, +-0). */
8310 exp = integer_zero_node;
8311 frac = arg0;
8312 break;
8313 case rvc_nan:
8314 case rvc_inf:
8315 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8316 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8317 case rvc_normal:
8319 /* Since the frexp function always expects base 2, and in
8320 GCC normalized significands are already in the range
8321 [0.5, 1.0), we have exactly what frexp wants. */
8322 REAL_VALUE_TYPE frac_rvt = *value;
8323 SET_REAL_EXP (&frac_rvt, 0);
8324 frac = build_real (rettype, frac_rvt);
8325 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8327 break;
8328 default:
8329 gcc_unreachable ();
8332 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8333 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8334 TREE_SIDE_EFFECTS (arg1) = 1;
8335 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8338 return NULL_TREE;
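/* Worked example, for illustration only: for arg0 == 8.0 the stored
   significand is 0.5 with REAL_EXP == 4, so the fold produces
   (*arg1 = 4, 0.5), matching frexp (8.0, &e) since 8.0 == 0.5 * 2^4.  */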
8341 /* Fold a call to builtin modf. */
8343 static tree
8344 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8346 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8347 return NULL_TREE;
8349 STRIP_NOPS (arg0);
8351 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8352 return NULL_TREE;
8354 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8356 /* Proceed if a valid pointer type was passed in. */
8357 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8359 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8360 REAL_VALUE_TYPE trunc, frac;
8362 switch (value->cl)
8364 case rvc_nan:
8365 case rvc_zero:
8366 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8367 trunc = frac = *value;
8368 break;
8369 case rvc_inf:
8370 /* For +-Inf, return (*arg1 = arg0, +-0). */
8371 frac = dconst0;
8372 frac.sign = value->sign;
8373 trunc = *value;
8374 break;
8375 case rvc_normal:
8376 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8377 real_trunc (&trunc, VOIDmode, value);
8378 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8379 /* If the original number was negative and already
8380 integral, then the fractional part is -0.0. */
8381 if (value->sign && frac.cl == rvc_zero)
8382 frac.sign = value->sign;
8383 break;
8386 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8387 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8388 build_real (rettype, trunc));
8389 TREE_SIDE_EFFECTS (arg1) = 1;
8390 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8391 build_real (rettype, frac));
8394 return NULL_TREE;
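/* Worked example, for illustration only: modf (-2.5, &ip) folds to
   (*ip = -2.0, -0.5), and for an already-integral negative input such
   as -3.0 the fractional part is -0.0 thanks to the sign fixup above.  */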
8397 /* Given a location LOC, an interclass builtin function decl FNDECL
8398 and its single argument ARG, return a folded expression computing
8399 the same, or NULL_TREE if we either couldn't or didn't want to fold
8400 (the latter happens if there's an RTL instruction available). */
8402 static tree
8403 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8405 machine_mode mode;
8407 if (!validate_arg (arg, REAL_TYPE))
8408 return NULL_TREE;
8410 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8411 return NULL_TREE;
8413 mode = TYPE_MODE (TREE_TYPE (arg));
8415 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8417 /* If there is no optab, try generic code. */
8418 switch (DECL_FUNCTION_CODE (fndecl))
8420 tree result;
8422 CASE_FLT_FN (BUILT_IN_ISINF):
8424 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8425 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8426 tree type = TREE_TYPE (arg);
8427 REAL_VALUE_TYPE r;
8428 char buf[128];
8430 if (is_ibm_extended)
8432 /* NaN and Inf are encoded in the high-order double value
8433 only. The low-order value is not significant. */
8434 type = double_type_node;
8435 mode = DFmode;
8436 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8438 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8439 real_from_string (&r, buf);
8440 result = build_call_expr (isgr_fn, 2,
8441 fold_build1_loc (loc, ABS_EXPR, type, arg),
8442 build_real (type, r));
8443 return result;
8445 CASE_FLT_FN (BUILT_IN_FINITE):
8446 case BUILT_IN_ISFINITE:
8448 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8449 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8450 tree type = TREE_TYPE (arg);
8451 REAL_VALUE_TYPE r;
8452 char buf[128];
8454 if (is_ibm_extended)
8456 /* NaN and Inf are encoded in the high-order double value
8457 only. The low-order value is not significant. */
8458 type = double_type_node;
8459 mode = DFmode;
8460 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8462 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8463 real_from_string (&r, buf);
8464 result = build_call_expr (isle_fn, 2,
8465 fold_build1_loc (loc, ABS_EXPR, type, arg),
8466 build_real (type, r));
8467 /*result = fold_build2_loc (loc, UNGT_EXPR,
8468 TREE_TYPE (TREE_TYPE (fndecl)),
8469 fold_build1_loc (loc, ABS_EXPR, type, arg),
8470 build_real (type, r));
8471 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8472 TREE_TYPE (TREE_TYPE (fndecl)),
8473 result);*/
8474 return result;
8476 case BUILT_IN_ISNORMAL:
8478 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8479 islessequal(fabs(x),DBL_MAX). */
8480 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8481 tree type = TREE_TYPE (arg);
8482 tree orig_arg, max_exp, min_exp;
8483 machine_mode orig_mode = mode;
8484 REAL_VALUE_TYPE rmax, rmin;
8485 char buf[128];
8487 orig_arg = arg = builtin_save_expr (arg);
8488 if (is_ibm_extended)
8490 /* Use double to test the normal range of IBM extended
8491 precision. Emin for IBM extended precision is
8492 different to emin for IEEE double, being 53 higher
8493 since the low double exponent is at least 53 lower
8494 than the high double exponent. */
8495 type = double_type_node;
8496 mode = DFmode;
8497 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8499 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8501 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8502 real_from_string (&rmax, buf);
8503 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8504 real_from_string (&rmin, buf);
8505 max_exp = build_real (type, rmax);
8506 min_exp = build_real (type, rmin);
8508 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8509 if (is_ibm_extended)
8511 /* Testing the high end of the range is done just using
8512 the high double, using the same test as isfinite().
8513 For the subnormal end of the range we first test the
8514 high double, then if its magnitude is equal to the
8515 limit of 0x1p-969, we test whether the low double is
8516 non-zero and opposite sign to the high double. */
8517 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8518 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8519 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8520 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8521 arg, min_exp);
8522 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8523 complex_double_type_node, orig_arg);
8524 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8525 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8526 tree zero = build_real (type, dconst0);
8527 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8528 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8529 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8530 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8531 fold_build3 (COND_EXPR,
8532 integer_type_node,
8533 hilt, logt, lolt));
8534 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8535 eq_min, ok_lo);
8536 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8537 gt_min, eq_min);
8539 else
8541 tree const isge_fn
8542 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8543 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8545 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8546 max_exp, min_exp);
8547 return result;
8549 default:
8550 break;
8553 return NULL_TREE;
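/* For illustration only, the generic lowerings built above, shown for
   double (with DBL_MAX and DBL_MIN standing for the mode's largest
   finite and smallest normal values):

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                         & islessequal (fabs (x), DBL_MAX)
*/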
8556 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8557 ARG is the argument for the call. */
8559 static tree
8560 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8562 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8564 if (!validate_arg (arg, REAL_TYPE))
8565 return NULL_TREE;
8567 switch (builtin_index)
8569 case BUILT_IN_ISINF:
8570 if (!HONOR_INFINITIES (arg))
8571 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8573 return NULL_TREE;
8575 case BUILT_IN_ISINF_SIGN:
8577 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8578 /* In a boolean context, GCC will fold the inner COND_EXPR to
8579 1. So e.g. "if (isinf_sign(x))" would be folded to just
8580 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8581 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8582 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8583 tree tmp = NULL_TREE;
8585 arg = builtin_save_expr (arg);
8587 if (signbit_fn && isinf_fn)
8589 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8590 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8592 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8593 signbit_call, integer_zero_node);
8594 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8595 isinf_call, integer_zero_node);
8597 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8598 integer_minus_one_node, integer_one_node);
8599 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8600 isinf_call, tmp,
8601 integer_zero_node);
8604 return tmp;
8607 case BUILT_IN_ISFINITE:
8608 if (!HONOR_NANS (arg)
8609 && !HONOR_INFINITIES (arg))
8610 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8612 return NULL_TREE;
8614 case BUILT_IN_ISNAN:
8615 if (!HONOR_NANS (arg))
8616 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8619 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8620 if (is_ibm_extended)
8622 /* NaN and Inf are encoded in the high-order double value
8623 only. The low-order value is not significant. */
8624 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8627 arg = builtin_save_expr (arg);
8628 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8630 default:
8631 gcc_unreachable ();
8635 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8636 This builtin will generate code to return the appropriate floating
8637 point classification depending on the value of the floating point
8638 number passed in. The possible return values must be supplied as
8639 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8640 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8641 one floating point argument which is "type generic". */
8643 static tree
8644 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8646 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8647 arg, type, res, tmp;
8648 machine_mode mode;
8649 REAL_VALUE_TYPE r;
8650 char buf[128];
8652 /* Verify the required arguments in the original call. */
8653 if (nargs != 6
8654 || !validate_arg (args[0], INTEGER_TYPE)
8655 || !validate_arg (args[1], INTEGER_TYPE)
8656 || !validate_arg (args[2], INTEGER_TYPE)
8657 || !validate_arg (args[3], INTEGER_TYPE)
8658 || !validate_arg (args[4], INTEGER_TYPE)
8659 || !validate_arg (args[5], REAL_TYPE))
8660 return NULL_TREE;
8662 fp_nan = args[0];
8663 fp_infinite = args[1];
8664 fp_normal = args[2];
8665 fp_subnormal = args[3];
8666 fp_zero = args[4];
8667 arg = args[5];
8668 type = TREE_TYPE (arg);
8669 mode = TYPE_MODE (type);
8670 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8672 /* fpclassify(x) ->
8673 isnan(x) ? FP_NAN :
8674 (fabs(x) == Inf ? FP_INFINITE :
8675 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8676 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8678 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8679 build_real (type, dconst0));
8680 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8681 tmp, fp_zero, fp_subnormal);
8683 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8684 real_from_string (&r, buf);
8685 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8686 arg, build_real (type, r));
8687 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8689 if (HONOR_INFINITIES (mode))
8691 real_inf (&r);
8692 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8693 build_real (type, r));
8694 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8695 fp_infinite, res);
8698 if (HONOR_NANS (mode))
8700 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8701 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8704 return res;
8707 /* Fold a call to an unordered comparison function such as
8708 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8709 being called and ARG0 and ARG1 are the arguments for the call.
8710 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8711 the opposite of the desired result. UNORDERED_CODE is used
8712 for modes that can hold NaNs and ORDERED_CODE is used for
8713 the rest. */
8715 static tree
8716 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8717 enum tree_code unordered_code,
8718 enum tree_code ordered_code)
8720 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8721 enum tree_code code;
8722 tree type0, type1;
8723 enum tree_code code0, code1;
8724 tree cmp_type = NULL_TREE;
8726 type0 = TREE_TYPE (arg0);
8727 type1 = TREE_TYPE (arg1);
8729 code0 = TREE_CODE (type0);
8730 code1 = TREE_CODE (type1);
8732 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8733 /* Choose the wider of two real types. */
8734 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8735 ? type0 : type1;
8736 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8737 cmp_type = type0;
8738 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8739 cmp_type = type1;
8741 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8742 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8744 if (unordered_code == UNORDERED_EXPR)
8746 if (!HONOR_NANS (arg0))
8747 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8748 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8751 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8752 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8753 fold_build2_loc (loc, code, type, arg0, arg1));
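/* For illustration only: with NaNs honored the fold above yields e.g.

     isgreater (x, y)  ->  TRUTH_NOT_EXPR of UNLE_EXPR (x, y)

   UNLE_EXPR is true when x <= y or the operands are unordered, so its
   negation is a quiet form of x > y: false on a NaN operand and, unlike
   a raw GT_EXPR, raising no invalid-operation exception.  */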
8756 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8757 arithmetic if it can never overflow, or into internal functions that
8758 return both the result of the arithmetic and an overflow flag in
8759 a complex integer result, or some other check for overflow.
8760 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8761 checking part of that. */
8763 static tree
8764 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8765 tree arg0, tree arg1, tree arg2)
8767 enum internal_fn ifn = IFN_LAST;
8768 /* The code of the expression corresponding to the type-generic
8769 built-in, or ERROR_MARK for the type-specific ones. */
8770 enum tree_code opcode = ERROR_MARK;
8771 bool ovf_only = false;
8773 switch (fcode)
8775 case BUILT_IN_ADD_OVERFLOW_P:
8776 ovf_only = true;
8777 /* FALLTHRU */
8778 case BUILT_IN_ADD_OVERFLOW:
8779 opcode = PLUS_EXPR;
8780 /* FALLTHRU */
8781 case BUILT_IN_SADD_OVERFLOW:
8782 case BUILT_IN_SADDL_OVERFLOW:
8783 case BUILT_IN_SADDLL_OVERFLOW:
8784 case BUILT_IN_UADD_OVERFLOW:
8785 case BUILT_IN_UADDL_OVERFLOW:
8786 case BUILT_IN_UADDLL_OVERFLOW:
8787 ifn = IFN_ADD_OVERFLOW;
8788 break;
8789 case BUILT_IN_SUB_OVERFLOW_P:
8790 ovf_only = true;
8791 /* FALLTHRU */
8792 case BUILT_IN_SUB_OVERFLOW:
8793 opcode = MINUS_EXPR;
8794 /* FALLTHRU */
8795 case BUILT_IN_SSUB_OVERFLOW:
8796 case BUILT_IN_SSUBL_OVERFLOW:
8797 case BUILT_IN_SSUBLL_OVERFLOW:
8798 case BUILT_IN_USUB_OVERFLOW:
8799 case BUILT_IN_USUBL_OVERFLOW:
8800 case BUILT_IN_USUBLL_OVERFLOW:
8801 ifn = IFN_SUB_OVERFLOW;
8802 break;
8803 case BUILT_IN_MUL_OVERFLOW_P:
8804 ovf_only = true;
8805 /* FALLTHRU */
8806 case BUILT_IN_MUL_OVERFLOW:
8807 opcode = MULT_EXPR;
8808 /* FALLTHRU */
8809 case BUILT_IN_SMUL_OVERFLOW:
8810 case BUILT_IN_SMULL_OVERFLOW:
8811 case BUILT_IN_SMULLL_OVERFLOW:
8812 case BUILT_IN_UMUL_OVERFLOW:
8813 case BUILT_IN_UMULL_OVERFLOW:
8814 case BUILT_IN_UMULLL_OVERFLOW:
8815 ifn = IFN_MUL_OVERFLOW;
8816 break;
8817 default:
8818 gcc_unreachable ();
8821 /* For the "generic" overloads, the first two arguments can have different
8822 types and the last argument determines the target type to use to check
8823 for overflow. The arguments of the other overloads all have the same
8824 type. */
8825 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8827 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8828 arguments are constant, attempt to fold the built-in call into a constant
8829 expression indicating whether or not it detected an overflow. */
8830 if (ovf_only
8831 && TREE_CODE (arg0) == INTEGER_CST
8832 && TREE_CODE (arg1) == INTEGER_CST)
8833 /* Perform the computation in the target type and check for overflow. */
8834 return omit_one_operand_loc (loc, boolean_type_node,
8835 arith_overflowed_p (opcode, type, arg0, arg1)
8836 ? boolean_true_node : boolean_false_node,
8837 arg2);
8839 tree ctype = build_complex_type (type);
8840 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8841 2, arg0, arg1);
8842 tree tgt = save_expr (call);
8843 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8844 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8845 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8847 if (ovf_only)
8848 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8850 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8851 tree store
8852 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8853 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
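/* Usage sketch (illustrative only, not part of GCC): with variable
   operands the folder above rewrites the call into IFN_ADD_OVERFLOW,
   whose complex result packs the sum (real part) and the overflow flag
   (imaginary part); with constant operands the _p form folds to a
   constant usable even in constant expressions:

     int sum;
     if (__builtin_add_overflow (a, b, &sum))   // -> IFN_ADD_OVERFLOW
       sum = 0;
     _Static_assert (__builtin_add_overflow_p (INT_MAX, 1, (int) 0),
                     "constant operands fold to a constant flag");  */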
8856 /* Fold a call to __builtin_FILE to a constant string. */
8858 static inline tree
8859 fold_builtin_FILE (location_t loc)
8861 if (const char *fname = LOCATION_FILE (loc))
8862 return build_string_literal (strlen (fname) + 1, fname);
8864 return build_string_literal (1, "");
8867 /* Fold a call to __builtin_FUNCTION to a constant string. */
8869 static inline tree
8870 fold_builtin_FUNCTION ()
8872 const char *name = "";
8874 if (current_function_decl)
8875 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8877 return build_string_literal (strlen (name) + 1, name);
8880 /* Fold a call to __builtin_LINE to an integer constant. */
8882 static inline tree
8883 fold_builtin_LINE (location_t loc, tree type)
8885 return build_int_cst (type, LOCATION_LINE (loc));
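/* Usage sketch (illustrative only, not part of GCC): unlike the __FILE__
   and __LINE__ macros, which expand where they are written, these three
   builtins fold at the location of the call that survives to this point,
   so hypothetical user code such as

     printf ("%s:%d in %s\n", __builtin_FILE (), __builtin_LINE (),
             __builtin_FUNCTION ());

   folds as if the enclosing file name, line number and function name had
   been spelled out as literals.  */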
8888 /* Fold a call to built-in function FNDECL with 0 arguments.
8889 This function returns NULL_TREE if no simplification was possible. */
8891 static tree
8892 fold_builtin_0 (location_t loc, tree fndecl)
8894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8896 switch (fcode)
8898 case BUILT_IN_FILE:
8899 return fold_builtin_FILE (loc);
8901 case BUILT_IN_FUNCTION:
8902 return fold_builtin_FUNCTION ();
8904 case BUILT_IN_LINE:
8905 return fold_builtin_LINE (loc, type);
8907 CASE_FLT_FN (BUILT_IN_INF):
8908 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8909 case BUILT_IN_INFD32:
8910 case BUILT_IN_INFD64:
8911 case BUILT_IN_INFD128:
8912 return fold_builtin_inf (loc, type, true);
8914 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8915 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8916 return fold_builtin_inf (loc, type, false);
8918 case BUILT_IN_CLASSIFY_TYPE:
8919 return fold_builtin_classify_type (NULL_TREE);
8921 default:
8922 break;
8924 return NULL_TREE;
8927 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8928 This function returns NULL_TREE if no simplification was possible. */
8930 static tree
8931 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8933 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8934 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8936 if (TREE_CODE (arg0) == ERROR_MARK)
8937 return NULL_TREE;
8939 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8940 return ret;
8942 switch (fcode)
8944 case BUILT_IN_CONSTANT_P:
8946 tree val = fold_builtin_constant_p (arg0);
8948 /* Gimplification will pull the CALL_EXPR for the builtin out of
8949 an if condition. When not optimizing, we'll not CSE it back.
8950 To avoid link-error-type regressions, return false now. */
8951 if (!val && !optimize)
8952 val = integer_zero_node;
8954 return val;
8957 case BUILT_IN_CLASSIFY_TYPE:
8958 return fold_builtin_classify_type (arg0);
8960 case BUILT_IN_STRLEN:
8961 return fold_builtin_strlen (loc, type, arg0);
8963 CASE_FLT_FN (BUILT_IN_FABS):
8964 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8965 case BUILT_IN_FABSD32:
8966 case BUILT_IN_FABSD64:
8967 case BUILT_IN_FABSD128:
8968 return fold_builtin_fabs (loc, arg0, type);
8970 case BUILT_IN_ABS:
8971 case BUILT_IN_LABS:
8972 case BUILT_IN_LLABS:
8973 case BUILT_IN_IMAXABS:
8974 return fold_builtin_abs (loc, arg0, type);
8976 CASE_FLT_FN (BUILT_IN_CONJ):
8977 if (validate_arg (arg0, COMPLEX_TYPE)
8978 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8979 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8980 break;
8982 CASE_FLT_FN (BUILT_IN_CREAL):
8983 if (validate_arg (arg0, COMPLEX_TYPE)
8984 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8985 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8986 break;
8988 CASE_FLT_FN (BUILT_IN_CIMAG):
8989 if (validate_arg (arg0, COMPLEX_TYPE)
8990 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8991 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8992 break;
8994 CASE_FLT_FN (BUILT_IN_CARG):
8995 return fold_builtin_carg (loc, arg0, type);
8997 case BUILT_IN_ISASCII:
8998 return fold_builtin_isascii (loc, arg0);
9000 case BUILT_IN_TOASCII:
9001 return fold_builtin_toascii (loc, arg0);
9003 case BUILT_IN_ISDIGIT:
9004 return fold_builtin_isdigit (loc, arg0);
9006 CASE_FLT_FN (BUILT_IN_FINITE):
9007 case BUILT_IN_FINITED32:
9008 case BUILT_IN_FINITED64:
9009 case BUILT_IN_FINITED128:
9010 case BUILT_IN_ISFINITE:
9012 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9013 if (ret)
9014 return ret;
9015 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9018 CASE_FLT_FN (BUILT_IN_ISINF):
9019 case BUILT_IN_ISINFD32:
9020 case BUILT_IN_ISINFD64:
9021 case BUILT_IN_ISINFD128:
9023 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9024 if (ret)
9025 return ret;
9026 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9029 case BUILT_IN_ISNORMAL:
9030 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9032 case BUILT_IN_ISINF_SIGN:
9033 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9035 CASE_FLT_FN (BUILT_IN_ISNAN):
9036 case BUILT_IN_ISNAND32:
9037 case BUILT_IN_ISNAND64:
9038 case BUILT_IN_ISNAND128:
9039 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9041 case BUILT_IN_FREE:
9042 if (integer_zerop (arg0))
9043 return build_empty_stmt (loc);
9044 break;
9046 default:
9047 break;
9050 return NULL_TREE;
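/* Sketch of the -O0 case above (illustrative, not part of GCC): the
   classic link_error guard pattern depends on the dead branch being
   removed even when not optimizing:

     extern void link_error (void);   // deliberately never defined
     void
     check (int x)
     {
       if (__builtin_constant_p (x))  // folds to 0 at -O0
         link_error ();               // branch removed, link succeeds
     }
*/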
9054 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9055 This function returns NULL_TREE if no simplification was possible. */
9057 static tree
9058 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9060 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9061 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9063 if (TREE_CODE (arg0) == ERROR_MARK
9064 || TREE_CODE (arg1) == ERROR_MARK)
9065 return NULL_TREE;
9067 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9068 return ret;
9070 switch (fcode)
9072 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9073 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9074 if (validate_arg (arg0, REAL_TYPE)
9075 && validate_arg (arg1, POINTER_TYPE))
9076 return do_mpfr_lgamma_r (arg0, arg1, type);
9077 break;
9079 CASE_FLT_FN (BUILT_IN_FREXP):
9080 return fold_builtin_frexp (loc, arg0, arg1, type);
9082 CASE_FLT_FN (BUILT_IN_MODF):
9083 return fold_builtin_modf (loc, arg0, arg1, type);
9085 case BUILT_IN_STRSPN:
9086 return fold_builtin_strspn (loc, arg0, arg1);
9088 case BUILT_IN_STRCSPN:
9089 return fold_builtin_strcspn (loc, arg0, arg1);
9091 case BUILT_IN_STRPBRK:
9092 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9094 case BUILT_IN_EXPECT:
9095 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9097 case BUILT_IN_ISGREATER:
9098 return fold_builtin_unordered_cmp (loc, fndecl,
9099 arg0, arg1, UNLE_EXPR, LE_EXPR);
9100 case BUILT_IN_ISGREATEREQUAL:
9101 return fold_builtin_unordered_cmp (loc, fndecl,
9102 arg0, arg1, UNLT_EXPR, LT_EXPR);
9103 case BUILT_IN_ISLESS:
9104 return fold_builtin_unordered_cmp (loc, fndecl,
9105 arg0, arg1, UNGE_EXPR, GE_EXPR);
9106 case BUILT_IN_ISLESSEQUAL:
9107 return fold_builtin_unordered_cmp (loc, fndecl,
9108 arg0, arg1, UNGT_EXPR, GT_EXPR);
9109 case BUILT_IN_ISLESSGREATER:
9110 return fold_builtin_unordered_cmp (loc, fndecl,
9111 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9112 case BUILT_IN_ISUNORDERED:
9113 return fold_builtin_unordered_cmp (loc, fndecl,
9114 arg0, arg1, UNORDERED_EXPR,
9115 NOP_EXPR);
9117 /* We do the folding for va_start in the expander. */
9118 case BUILT_IN_VA_START:
9119 break;
9121 case BUILT_IN_OBJECT_SIZE:
9122 return fold_builtin_object_size (arg0, arg1);
9124 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9125 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9127 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9128 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9130 default:
9131 break;
9133 return NULL_TREE;
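/* Mapping sketch (illustrative, not part of GCC): each type-generic
   comparison macro is folded to the negated opposite unordered code from
   the table above, e.g.

     int
     example (double x, double y)
     {
       return isgreater (x, y);   // folded to ! UNLE_EXPR <x, y>
     }

   so a quiet NaN operand yields 0 without raising FE_INVALID, unlike a
   plain x > y.  */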
9136 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9137 and ARG2.
9138 This function returns NULL_TREE if no simplification was possible. */
9140 static tree
9141 fold_builtin_3 (location_t loc, tree fndecl,
9142 tree arg0, tree arg1, tree arg2)
9144 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9145 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9147 if (TREE_CODE (arg0) == ERROR_MARK
9148 || TREE_CODE (arg1) == ERROR_MARK
9149 || TREE_CODE (arg2) == ERROR_MARK)
9150 return NULL_TREE;
9152 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9153 arg0, arg1, arg2))
9154 return ret;
9156 switch (fcode)
9159 CASE_FLT_FN (BUILT_IN_SINCOS):
9160 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9162 CASE_FLT_FN (BUILT_IN_FMA):
9163 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9164 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9166 CASE_FLT_FN (BUILT_IN_REMQUO):
9167 if (validate_arg (arg0, REAL_TYPE)
9168 && validate_arg (arg1, REAL_TYPE)
9169 && validate_arg (arg2, POINTER_TYPE))
9170 return do_mpfr_remquo (arg0, arg1, arg2);
9171 break;
9173 case BUILT_IN_MEMCMP:
9174 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9176 case BUILT_IN_EXPECT:
9177 return fold_builtin_expect (loc, arg0, arg1, arg2);
9179 case BUILT_IN_ADD_OVERFLOW:
9180 case BUILT_IN_SUB_OVERFLOW:
9181 case BUILT_IN_MUL_OVERFLOW:
9182 case BUILT_IN_ADD_OVERFLOW_P:
9183 case BUILT_IN_SUB_OVERFLOW_P:
9184 case BUILT_IN_MUL_OVERFLOW_P:
9185 case BUILT_IN_SADD_OVERFLOW:
9186 case BUILT_IN_SADDL_OVERFLOW:
9187 case BUILT_IN_SADDLL_OVERFLOW:
9188 case BUILT_IN_SSUB_OVERFLOW:
9189 case BUILT_IN_SSUBL_OVERFLOW:
9190 case BUILT_IN_SSUBLL_OVERFLOW:
9191 case BUILT_IN_SMUL_OVERFLOW:
9192 case BUILT_IN_SMULL_OVERFLOW:
9193 case BUILT_IN_SMULLL_OVERFLOW:
9194 case BUILT_IN_UADD_OVERFLOW:
9195 case BUILT_IN_UADDL_OVERFLOW:
9196 case BUILT_IN_UADDLL_OVERFLOW:
9197 case BUILT_IN_USUB_OVERFLOW:
9198 case BUILT_IN_USUBL_OVERFLOW:
9199 case BUILT_IN_USUBLL_OVERFLOW:
9200 case BUILT_IN_UMUL_OVERFLOW:
9201 case BUILT_IN_UMULL_OVERFLOW:
9202 case BUILT_IN_UMULLL_OVERFLOW:
9203 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9205 default:
9206 break;
9208 return NULL_TREE;
9211 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9212 arguments. IGNORE is true if the result of the
9213 function call is ignored. This function returns NULL_TREE if no
9214 simplification was possible. */
9216 tree
9217 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9219 tree ret = NULL_TREE;
9221 switch (nargs)
9223 case 0:
9224 ret = fold_builtin_0 (loc, fndecl);
9225 break;
9226 case 1:
9227 ret = fold_builtin_1 (loc, fndecl, args[0]);
9228 break;
9229 case 2:
9230 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9231 break;
9232 case 3:
9233 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9234 break;
9235 default:
9236 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9237 break;
9239 if (ret)
9241 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9242 SET_EXPR_LOCATION (ret, loc);
9243 TREE_NO_WARNING (ret) = 1;
9244 return ret;
9246 return NULL_TREE;
9249 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9250 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9251 of arguments in ARGS to be omitted. OLDNARGS is the number of
9252 elements in ARGS. */
9254 static tree
9255 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9256 int skip, tree fndecl, int n, va_list newargs)
9258 int nargs = oldnargs - skip + n;
9259 tree *buffer;
9261 if (n > 0)
9263 int i, j;
9265 buffer = XALLOCAVEC (tree, nargs);
9266 for (i = 0; i < n; i++)
9267 buffer[i] = va_arg (newargs, tree);
9268 for (j = skip; j < oldnargs; j++, i++)
9269 buffer[i] = args[j];
9271 else
9272 buffer = args + skip;
9274 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9277 /* Return true if FNDECL shouldn't be folded right now.
9278 If a built-in function has an inline attribute always_inline
9279 wrapper, defer folding it until after always_inline functions have
9280 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9281 might not be performed. */
9283 bool
9284 avoid_folding_inline_builtin (tree fndecl)
9286 return (DECL_DECLARED_INLINE_P (fndecl)
9287 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9288 && cfun
9289 && !cfun->always_inline_functions_inlined
9290 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
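/* Sketch of the deferred case (glibc-style, hypothetical code, not quoted
   from any header): with -D_FORTIFY_SOURCE the C library wraps string
   builtins in always_inline functions roughly like

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
                                      __builtin_object_size (__dest, 1));
     }

   Folding the strcpy call before the wrapper is inlined would skip the
   object-size check, hence the test above.  */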
9293 /* A wrapper function for builtin folding that prevents warnings for
9294 "statement without effect" and the like, caused by removing the
9295 call node earlier than the warning is generated. */
9297 tree
9298 fold_call_expr (location_t loc, tree exp, bool ignore)
9300 tree ret = NULL_TREE;
9301 tree fndecl = get_callee_fndecl (exp);
9302 if (fndecl
9303 && TREE_CODE (fndecl) == FUNCTION_DECL
9304 && DECL_BUILT_IN (fndecl)
9305 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9306 yet. Defer folding until we see all the arguments
9307 (after inlining). */
9308 && !CALL_EXPR_VA_ARG_PACK (exp))
9310 int nargs = call_expr_nargs (exp);
9312 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9313 instead last argument is __builtin_va_arg_pack (). Defer folding
9314 even in that case, until arguments are finalized. */
9315 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9317 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9318 if (fndecl2
9319 && TREE_CODE (fndecl2) == FUNCTION_DECL
9320 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9321 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9322 return NULL_TREE;
9325 if (avoid_folding_inline_builtin (fndecl))
9326 return NULL_TREE;
9328 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9329 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9330 CALL_EXPR_ARGP (exp), ignore);
9331 else
9333 tree *args = CALL_EXPR_ARGP (exp);
9334 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9335 if (ret)
9336 return ret;
9339 return NULL_TREE;
9342 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9343 N arguments are passed in the array ARGARRAY. Return a folded
9344 expression or NULL_TREE if no simplification was possible. */
9346 tree
9347 fold_builtin_call_array (location_t loc, tree,
9348 tree fn,
9349 int n,
9350 tree *argarray)
9352 if (TREE_CODE (fn) != ADDR_EXPR)
9353 return NULL_TREE;
9355 tree fndecl = TREE_OPERAND (fn, 0);
9356 if (TREE_CODE (fndecl) == FUNCTION_DECL
9357 && DECL_BUILT_IN (fndecl))
9359 /* If last argument is __builtin_va_arg_pack (), arguments to this
9360 function are not finalized yet. Defer folding until they are. */
9361 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9363 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9364 if (fndecl2
9365 && TREE_CODE (fndecl2) == FUNCTION_DECL
9366 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9367 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9368 return NULL_TREE;
9370 if (avoid_folding_inline_builtin (fndecl))
9371 return NULL_TREE;
9372 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9373 return targetm.fold_builtin (fndecl, n, argarray, false);
9374 else
9375 return fold_builtin_n (loc, fndecl, argarray, n, false);
9378 return NULL_TREE;
9381 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9382 along with N new arguments specified as the "..." parameters. SKIP
9383 is the number of arguments in EXP to be omitted. This function is used
9384 to do varargs-to-varargs transformations. */
9386 static tree
9387 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9389 va_list ap;
9390 tree t;
9392 va_start (ap, n);
9393 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9394 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9395 va_end (ap);
9397 return t;
9400 /* Validate a single argument ARG against a tree code CODE representing
9401 a type. Return true when argument is valid. */
9403 static bool
9404 validate_arg (const_tree arg, enum tree_code code)
9406 if (!arg)
9407 return false;
9408 else if (code == POINTER_TYPE)
9409 return POINTER_TYPE_P (TREE_TYPE (arg));
9410 else if (code == INTEGER_TYPE)
9411 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9412 return code == TREE_CODE (TREE_TYPE (arg));
9415 /* This function validates the types of a function call argument list
9416 against a specified list of tree_codes. If the last specifier is a 0,
9417 that represents an ellipsis, otherwise the last specifier must be a
9418 VOID_TYPE.
9420 This is the GIMPLE version of validate_arglist. Eventually we want to
9421 completely convert builtins.c to work from GIMPLEs and the tree based
9422 validate_arglist will then be removed. */
9424 bool
9425 validate_gimple_arglist (const gcall *call, ...)
9427 enum tree_code code;
9428 bool res = 0;
9429 va_list ap;
9430 const_tree arg;
9431 size_t i;
9433 va_start (ap, call);
9434 i = 0;
9436 do
9438 code = (enum tree_code) va_arg (ap, int);
9439 switch (code)
9441 case 0:
9442 /* This signifies an ellipsis; any further arguments are all ok. */
9443 res = true;
9444 goto end;
9445 case VOID_TYPE:
9446 /* This signifies an endlink, if no arguments remain, return
9447 true, otherwise return false. */
9448 res = (i == gimple_call_num_args (call));
9449 goto end;
9450 default:
9451 /* If no parameters remain or the parameter's code does not
9452 match the specified code, return false. Otherwise continue
9453 checking any remaining arguments. */
9454 arg = gimple_call_arg (call, i++);
9455 if (!validate_arg (arg, code))
9456 goto end;
9457 break;
9460 while (1);
9462 /* We need gotos here since we can only have one VA_CLOSE in a
9463 function. */
9464 end: ;
9465 va_end (ap);
9467 return res;
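/* Call sketch (hypothetical caller, for exposition): a remquo-style
   signature would be checked as

     if (!validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE,
                                   POINTER_TYPE, VOID_TYPE))
       return false;

   while a printf-style check would end the list with 0 instead of
   VOID_TYPE to accept any further arguments.  */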
9470 /* Default target-specific builtin expander that does nothing. */
9472 rtx
9473 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9474 rtx target ATTRIBUTE_UNUSED,
9475 rtx subtarget ATTRIBUTE_UNUSED,
9476 machine_mode mode ATTRIBUTE_UNUSED,
9477 int ignore ATTRIBUTE_UNUSED)
9479 return NULL_RTX;
9482 /* Returns true if EXP represents data that would potentially reside
9483 in a readonly section. */
9485 bool
9486 readonly_data_expr (tree exp)
9488 STRIP_NOPS (exp);
9490 if (TREE_CODE (exp) != ADDR_EXPR)
9491 return false;
9493 exp = get_base_address (TREE_OPERAND (exp, 0));
9494 if (!exp)
9495 return false;
9497 /* Make sure we call decl_readonly_section only for trees it
9498 can handle (since it returns true for everything it doesn't
9499 understand). */
9500 if (TREE_CODE (exp) == STRING_CST
9501 || TREE_CODE (exp) == CONSTRUCTOR
9502 || (VAR_P (exp) && TREE_STATIC (exp)))
9503 return decl_readonly_section (exp, 0);
9504 else
9505 return false;
9508 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9509 to the call, and TYPE is its return type.
9511 Return NULL_TREE if no simplification was possible, otherwise return the
9512 simplified form of the call as a tree.
9514 The simplified form may be a constant or other expression which
9515 computes the same value, but in a more efficient manner (including
9516 calls to other builtin functions).
9518 The call may contain arguments which need to be evaluated, but
9519 which are not useful to determine the result of the call. In
9520 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9521 COMPOUND_EXPR will be an argument which must be evaluated.
9522 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9523 COMPOUND_EXPR in the chain will contain the tree for the simplified
9524 form of the builtin function call. */
9526 static tree
9527 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9529 if (!validate_arg (s1, POINTER_TYPE)
9530 || !validate_arg (s2, POINTER_TYPE))
9531 return NULL_TREE;
9532 else
9534 tree fn;
9535 const char *p1, *p2;
9537 p2 = c_getstr (s2);
9538 if (p2 == NULL)
9539 return NULL_TREE;
9541 p1 = c_getstr (s1);
9542 if (p1 != NULL)
9544 const char *r = strpbrk (p1, p2);
9545 tree tem;
9547 if (r == NULL)
9548 return build_int_cst (TREE_TYPE (s1), 0);
9550 /* Return an offset into the constant string argument. */
9551 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9552 return fold_convert_loc (loc, type, tem);
9555 if (p2[0] == '\0')
9556 /* strpbrk(x, "") == NULL.
9557 Evaluate and ignore s1 in case it had side-effects. */
9558 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9560 if (p2[1] != '\0')
9561 return NULL_TREE; /* Really call strpbrk. */
9563 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9564 if (!fn)
9565 return NULL_TREE;
9567 /* New argument list transforming strpbrk(s1, s2) to
9568 strchr(s1, s2[0]). */
9569 return build_call_expr_loc (loc, fn, 2, s1,
9570 build_int_cst (integer_type_node, p2[0]));
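/* Transformation sketch (illustrative, not part of GCC):

     strpbrk (s, "")    ->  (s, (char *) 0)    // evaluate s, return NULL
     strpbrk (s, "/")   ->  strchr (s, '/')
     strpbrk (s, "ab")  ->  kept as a real strpbrk call

   and with both arguments constant the call folds to an offset into the
   first string, or to NULL.  */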
9574 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9575 to the call.
9577 Return NULL_TREE if no simplification was possible, otherwise return the
9578 simplified form of the call as a tree.
9580 The simplified form may be a constant or other expression which
9581 computes the same value, but in a more efficient manner (including
9582 calls to other builtin functions).
9584 The call may contain arguments which need to be evaluated, but
9585 which are not useful to determine the result of the call. In
9586 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9587 COMPOUND_EXPR will be an argument which must be evaluated.
9588 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9589 COMPOUND_EXPR in the chain will contain the tree for the simplified
9590 form of the builtin function call. */
9592 static tree
9593 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9595 if (!validate_arg (s1, POINTER_TYPE)
9596 || !validate_arg (s2, POINTER_TYPE))
9597 return NULL_TREE;
9598 else
9600 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9602 /* If either argument is "", return NULL_TREE. */
9603 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9604 /* Evaluate and ignore both arguments in case either one has
9605 side-effects. */
9606 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9607 s1, s2);
9608 return NULL_TREE;
9612 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9613 to the call.
9615 Return NULL_TREE if no simplification was possible, otherwise return the
9616 simplified form of the call as a tree.
9618 The simplified form may be a constant or other expression which
9619 computes the same value, but in a more efficient manner (including
9620 calls to other builtin functions).
9622 The call may contain arguments which need to be evaluated, but
9623 which are not useful to determine the result of the call. In
9624 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9625 COMPOUND_EXPR will be an argument which must be evaluated.
9626 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9627 COMPOUND_EXPR in the chain will contain the tree for the simplified
9628 form of the builtin function call. */
9630 static tree
9631 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9633 if (!validate_arg (s1, POINTER_TYPE)
9634 || !validate_arg (s2, POINTER_TYPE))
9635 return NULL_TREE;
9636 else
9638 /* If the first argument is "", return NULL_TREE. */
9639 const char *p1 = c_getstr (s1);
9640 if (p1 && *p1 == '\0')
9642 /* Evaluate and ignore argument s2 in case it has
9643 side-effects. */
9644 return omit_one_operand_loc (loc, size_type_node,
9645 size_zero_node, s2);
9648 /* If the second argument is "", return __builtin_strlen(s1). */
9649 const char *p2 = c_getstr (s2);
9650 if (p2 && *p2 == '\0')
9652 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9654 /* If the replacement _DECL isn't initialized, don't do the
9655 transformation. */
9656 if (!fn)
9657 return NULL_TREE;
9659 return build_call_expr_loc (loc, fn, 1, s1);
9661 return NULL_TREE;
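/* Transformation sketch (illustrative, not part of GCC):

     strcspn ("", r)  ->  (r, (size_t) 0)   // evaluate r, return 0
     strcspn (s, "")  ->  strlen (s)        // empty reject set

   since no character of S can ever match an empty reject set.  */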
9665 /* Fold the next_arg or va_start call EXP.  Returns true if an error
9666 was produced, false otherwise.  This is done so that we don't output
9667 the error or warning more than once. */
9669 bool
9670 fold_builtin_next_arg (tree exp, bool va_start_p)
9672 tree fntype = TREE_TYPE (current_function_decl);
9673 int nargs = call_expr_nargs (exp);
9674 tree arg;
9675 /* There is a good chance the current input_location points inside the
9676 definition of the va_start macro (perhaps on the token for
9677 builtin) in a system header, so warnings will not be emitted.
9678 Use the location in real source code. */
9679 source_location current_location =
9680 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9681 NULL);
9683 if (!stdarg_p (fntype))
9685 error ("%<va_start%> used in function with fixed args");
9686 return true;
9689 if (va_start_p)
9691 if (va_start_p && (nargs != 2))
9693 error ("wrong number of arguments to function %<va_start%>");
9694 return true;
9696 arg = CALL_EXPR_ARG (exp, 1);
9698 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9699 when we checked the arguments and if needed issued a warning. */
9700 else
9702 if (nargs == 0)
9704 /* Evidently an out-of-date version of <stdarg.h>; can't validate
9705 va_start's second argument, but can still work as intended. */
9706 warning_at (current_location,
9707 OPT_Wvarargs,
9708 "%<__builtin_next_arg%> called without an argument");
9709 return true;
9711 else if (nargs > 1)
9713 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9714 return true;
9716 arg = CALL_EXPR_ARG (exp, 0);
9719 if (TREE_CODE (arg) == SSA_NAME)
9720 arg = SSA_NAME_VAR (arg);
9722 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9723 or __builtin_next_arg (0) the first time we see it, after checking
9724 the arguments and if needed issuing a warning. */
9725 if (!integer_zerop (arg))
9727 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9729 /* Strip off all nops for the sake of the comparison. This
9730 is not quite the same as STRIP_NOPS. It does more.
9731 We must also strip off INDIRECT_EXPR for C++ reference
9732 parameters. */
9733 while (CONVERT_EXPR_P (arg)
9734 || TREE_CODE (arg) == INDIRECT_REF)
9735 arg = TREE_OPERAND (arg, 0);
9736 if (arg != last_parm)
9738 /* FIXME: Sometimes the tree optimizers hand us something other than
9739 the last argument even though the user did pass the last one.  We
9740 just warn and set the arg to be the last argument so that we do not
9741 generate wrong code because of it. */
9743 warning_at (current_location,
9744 OPT_Wvarargs,
9745 "second parameter of %<va_start%> not last named argument");
9748 /* Undefined by C99 7.15.1.4p4 (va_start):
9749 "If the parameter parmN is declared with the register storage
9750 class, with a function or array type, or with a type that is
9751 not compatible with the type that results after application of
9752 the default argument promotions, the behavior is undefined."
9754 else if (DECL_REGISTER (arg))
9756 warning_at (current_location,
9757 OPT_Wvarargs,
9758 "undefined behavior when second parameter of "
9759 "%<va_start%> is declared with %<register%> storage");
9762 /* We want to verify the second parameter just once before the tree
9763 optimizers are run and then avoid keeping it in the tree,
9764 as otherwise we could warn even for correct code like:
9765 void foo (int i, ...)
9766 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9767 if (va_start_p)
9768 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9769 else
9770 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9772 return false;
9776 /* Expand a call EXP to __builtin_object_size. */
9778 static rtx
9779 expand_builtin_object_size (tree exp)
9781 tree ost;
9782 int object_size_type;
9783 tree fndecl = get_callee_fndecl (exp);
9785 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9787 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9788 exp, fndecl);
9789 expand_builtin_trap ();
9790 return const0_rtx;
9793 ost = CALL_EXPR_ARG (exp, 1);
9794 STRIP_NOPS (ost);
9796 if (TREE_CODE (ost) != INTEGER_CST
9797 || tree_int_cst_sgn (ost) < 0
9798 || compare_tree_int (ost, 3) > 0)
9800 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9801 exp, fndecl);
9802 expand_builtin_trap ();
9803 return const0_rtx;
9806 object_size_type = tree_to_shwi (ost);
9808 return object_size_type < 2 ? constm1_rtx : const0_rtx;
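/* Behavior sketch (illustrative, not part of GCC): when the size cannot
   be determined at this point, the expansion above yields the documented
   "unknown" defaults for a hypothetical opaque pointer P:

     __builtin_object_size (p, 0)   ->  (size_t) -1   // maximum modes 0/1
     __builtin_object_size (p, 2)   ->  (size_t) 0    // minimum modes 2/3
*/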
9811 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9812 FCODE is the BUILT_IN_* to use.
9813 Return NULL_RTX if we failed; the caller should emit a normal call,
9814 otherwise try to get the result in TARGET, if convenient (and in
9815 mode MODE if that's convenient). */
9817 static rtx
9818 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9819 enum built_in_function fcode)
9821 if (!validate_arglist (exp,
9822 POINTER_TYPE,
9823 fcode == BUILT_IN_MEMSET_CHK
9824 ? INTEGER_TYPE : POINTER_TYPE,
9825 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9826 return NULL_RTX;
9828 tree dest = CALL_EXPR_ARG (exp, 0);
9829 tree src = CALL_EXPR_ARG (exp, 1);
9830 tree len = CALL_EXPR_ARG (exp, 2);
9831 tree size = CALL_EXPR_ARG (exp, 3);
9833 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9834 /*str=*/NULL_TREE, size);
9836 if (!tree_fits_uhwi_p (size))
9837 return NULL_RTX;
9839 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9841 /* Avoid transforming the checking call to an ordinary one when
9842 an overflow has been detected or when the call couldn't be
9843 validated because the size is not constant. */
9844 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9845 return NULL_RTX;
9847 tree fn = NULL_TREE;
9848 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9849 mem{cpy,pcpy,move,set} is available. */
9850 switch (fcode)
9852 case BUILT_IN_MEMCPY_CHK:
9853 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9854 break;
9855 case BUILT_IN_MEMPCPY_CHK:
9856 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9857 break;
9858 case BUILT_IN_MEMMOVE_CHK:
9859 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9860 break;
9861 case BUILT_IN_MEMSET_CHK:
9862 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9863 break;
9864 default:
9865 break;
9868 if (! fn)
9869 return NULL_RTX;
9871 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9872 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9873 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9874 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9876 else if (fcode == BUILT_IN_MEMSET_CHK)
9877 return NULL_RTX;
9878 else
9880 unsigned int dest_align = get_pointer_alignment (dest);
9882 /* If DEST is not a pointer type, call the normal function. */
9883 if (dest_align == 0)
9884 return NULL_RTX;
9886 /* If SRC and DEST are the same (and not volatile), do nothing. */
9887 if (operand_equal_p (src, dest, 0))
9889 tree expr;
9891 if (fcode != BUILT_IN_MEMPCPY_CHK)
9893 /* Evaluate and ignore LEN in case it has side-effects. */
9894 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9895 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9898 expr = fold_build_pointer_plus (dest, len);
9899 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9902 /* __memmove_chk special case. */
9903 if (fcode == BUILT_IN_MEMMOVE_CHK)
9905 unsigned int src_align = get_pointer_alignment (src);
9907 if (src_align == 0)
9908 return NULL_RTX;
9910 /* If src is categorized for a readonly section we can use
9911 normal __memcpy_chk. */
9912 if (readonly_data_expr (src))
9914 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9915 if (!fn)
9916 return NULL_RTX;
9917 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9918 dest, src, len, size);
9919 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9920 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9921 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9924 return NULL_RTX;
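/* Transformation sketch (illustrative, not part of GCC): with a constant
   length that provably fits the destination, the checking call degrades
   to the plain one:

     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
       ->  memcpy (buf, src, 4)

   while a detected overflow or a non-constant size keeps the _chk call
   so the check happens at run time.  */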
9928 /* Emit warning if a buffer overflow is detected at compile time. */
9930 static void
9931 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9933 /* The source string. */
9934 tree srcstr = NULL_TREE;
9935 /* The size of the destination object. */
9936 tree objsize = NULL_TREE;
9937 /* The string that is being concatenated with (as in __strcat_chk)
9938 or null if it isn't. */
9939 tree catstr = NULL_TREE;
9940 /* The maximum length of the source sequence in a bounded operation
9941 (such as __strncat_chk) or null if the operation isn't bounded
9942 (such as __strcat_chk). */
9943 tree maxread = NULL_TREE;
9944 /* The exact size of the access (such as in __strncpy_chk). */
9945 tree size = NULL_TREE;
9947 switch (fcode)
9949 case BUILT_IN_STRCPY_CHK:
9950 case BUILT_IN_STPCPY_CHK:
9951 srcstr = CALL_EXPR_ARG (exp, 1);
9952 objsize = CALL_EXPR_ARG (exp, 2);
9953 break;
9955 case BUILT_IN_STRCAT_CHK:
9956 /* For __strcat_chk the warning will be emitted only if overflowing
9957 by at least strlen (dest) + 1 bytes. */
9958 catstr = CALL_EXPR_ARG (exp, 0);
9959 srcstr = CALL_EXPR_ARG (exp, 1);
9960 objsize = CALL_EXPR_ARG (exp, 2);
9961 break;
9963 case BUILT_IN_STRNCAT_CHK:
9964 catstr = CALL_EXPR_ARG (exp, 0);
9965 srcstr = CALL_EXPR_ARG (exp, 1);
9966 maxread = CALL_EXPR_ARG (exp, 2);
9967 objsize = CALL_EXPR_ARG (exp, 3);
9968 break;
9970 case BUILT_IN_STRNCPY_CHK:
9971 case BUILT_IN_STPNCPY_CHK:
9972 srcstr = CALL_EXPR_ARG (exp, 1);
9973 size = CALL_EXPR_ARG (exp, 2);
9974 objsize = CALL_EXPR_ARG (exp, 3);
9975 break;
9977 case BUILT_IN_SNPRINTF_CHK:
9978 case BUILT_IN_VSNPRINTF_CHK:
9979 maxread = CALL_EXPR_ARG (exp, 1);
9980 objsize = CALL_EXPR_ARG (exp, 3);
9981 break;
9982 default:
9983 gcc_unreachable ();
9986 if (catstr && maxread)
9988 /* Check __strncat_chk. There is no way to determine the length
9989 of the string to which the source string is being appended so
9990 just warn when the length of the source string is not known. */
9991 check_strncat_sizes (exp, objsize);
9992 return;
9995 /* The destination argument is the first one for all built-ins above. */
9996 tree dst = CALL_EXPR_ARG (exp, 0);
9998 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10001 /* Emit warning if a buffer overflow is detected at compile time
10002 in __sprintf_chk/__vsprintf_chk calls. */
10004 static void
10005 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10007 tree size, len, fmt;
10008 const char *fmt_str;
10009 int nargs = call_expr_nargs (exp);
10011 /* Verify the required arguments in the original call. */
10013 if (nargs < 4)
10014 return;
10015 size = CALL_EXPR_ARG (exp, 2);
10016 fmt = CALL_EXPR_ARG (exp, 3);
10018 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10019 return;
10021 /* Check whether the format is a literal string constant. */
10022 fmt_str = c_getstr (fmt);
10023 if (fmt_str == NULL)
10024 return;
10026 if (!init_target_chars ())
10027 return;
10029 /* If the format doesn't contain % args or %%, we know its size. */
10030 if (strchr (fmt_str, target_percent) == 0)
10031 len = build_int_cstu (size_type_node, strlen (fmt_str));
10032 /* If the format is "%s" and first ... argument is a string literal,
10033 we know it too. */
10034 else if (fcode == BUILT_IN_SPRINTF_CHK
10035 && strcmp (fmt_str, target_percent_s) == 0)
10037 tree arg;
10039 if (nargs < 5)
10040 return;
10041 arg = CALL_EXPR_ARG (exp, 4);
10042 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10043 return;
10045 len = c_strlen (arg, 1);
10046 if (!len || ! tree_fits_uhwi_p (len))
10047 return;
10049 else
10050 return;
10052 /* Add one for the terminating nul. */
10053 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10055 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10056 /*maxread=*/NULL_TREE, len, size);
10059 /* Emit a warning if free is called with the address of a variable. */
10061 static void
10062 maybe_emit_free_warning (tree exp)
10064 tree arg = CALL_EXPR_ARG (exp, 0);
10066 STRIP_NOPS (arg);
10067 if (TREE_CODE (arg) != ADDR_EXPR)
10068 return;
10070 arg = get_base_address (TREE_OPERAND (arg, 0));
10071 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10072 return;
10074 if (SSA_VAR_P (arg))
10075 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10076 "%Kattempt to free a non-heap object %qD", exp, arg);
10077 else
10078 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10079 "%Kattempt to free a non-heap object", exp);
10082 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10083 if possible. */
10085 static tree
10086 fold_builtin_object_size (tree ptr, tree ost)
10088 unsigned HOST_WIDE_INT bytes;
10089 int object_size_type;
10091 if (!validate_arg (ptr, POINTER_TYPE)
10092 || !validate_arg (ost, INTEGER_TYPE))
10093 return NULL_TREE;
10095 STRIP_NOPS (ost);
10097 if (TREE_CODE (ost) != INTEGER_CST
10098 || tree_int_cst_sgn (ost) < 0
10099 || compare_tree_int (ost, 3) > 0)
10100 return NULL_TREE;
10102 object_size_type = tree_to_shwi (ost);
10104 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10105 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10106 and (size_t) 0 for types 2 and 3. */
10107 if (TREE_SIDE_EFFECTS (ptr))
10108 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10110 if (TREE_CODE (ptr) == ADDR_EXPR)
10112 compute_builtin_object_size (ptr, object_size_type, &bytes);
10113 if (wi::fits_to_tree_p (bytes, size_type_node))
10114 return build_int_cstu (size_type_node, bytes);
10116 else if (TREE_CODE (ptr) == SSA_NAME)
10118 /* If object size is not known yet, delay folding until
10119 later. Maybe subsequent passes will help determining
10120 it. */
10121 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10122 && wi::fits_to_tree_p (bytes, size_type_node))
10123 return build_int_cstu (size_type_node, bytes);
10126 return NULL_TREE;
10129 /* Builtins with folding operations that operate on "..." arguments
10130 need special handling; we need to store the arguments in a convenient
10131 data structure before attempting any folding. Fortunately there are
10132 only a few builtins that fall into this category.  FNDECL is the
10133 function, ARGS are the NARGS arguments to the call. */
10135 static tree
10136 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10138 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10139 tree ret = NULL_TREE;
10141 switch (fcode)
10143 case BUILT_IN_FPCLASSIFY:
10144 ret = fold_builtin_fpclassify (loc, args, nargs);
10145 break;
10147 default:
10148 break;
10150 if (ret)
10152 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10153 SET_EXPR_LOCATION (ret, loc);
10154 TREE_NO_WARNING (ret) = 1;
10155 return ret;
10157 return NULL_TREE;
10160 /* Initialize format string characters in the target charset. */
10162 bool
10163 init_target_chars (void)
10165 static bool init;
10166 if (!init)
10168 target_newline = lang_hooks.to_target_charset ('\n');
10169 target_percent = lang_hooks.to_target_charset ('%');
10170 target_c = lang_hooks.to_target_charset ('c');
10171 target_s = lang_hooks.to_target_charset ('s');
10172 if (target_newline == 0 || target_percent == 0 || target_c == 0
10173 || target_s == 0)
10174 return false;
10176 target_percent_c[0] = target_percent;
10177 target_percent_c[1] = target_c;
10178 target_percent_c[2] = '\0';
10180 target_percent_s[0] = target_percent;
10181 target_percent_s[1] = target_s;
10182 target_percent_s[2] = '\0';
10184 target_percent_s_newline[0] = target_percent;
10185 target_percent_s_newline[1] = target_s;
10186 target_percent_s_newline[2] = target_newline;
10187 target_percent_s_newline[3] = '\0';
10189 init = true;
10191 return true;
10194 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10195 and no overflow/underflow occurred. INEXACT is true if M was not
10196 exactly calculated. TYPE is the tree type for the result. This
10197 function assumes that you cleared the MPFR flags and then
10198 calculated M to see if anything subsequently set a flag prior to
10199 entering this function. Return NULL_TREE if any checks fail. */
10201 static tree
10202 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10204 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10205 overflow/underflow occurred. If -frounding-math, proceed iff the
10206 result of calling FUNC was exact. */
10207 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10208 && (!flag_rounding_math || !inexact))
10210 REAL_VALUE_TYPE rr;
10212 real_from_mpfr (&rr, m, type, GMP_RNDN);
10213 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10214 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10215 but the mpfr_t is not, then we underflowed in the
10216 conversion. */
10217 if (real_isfinite (&rr)
10218 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10220 REAL_VALUE_TYPE rmode;
10222 real_convert (&rmode, TYPE_MODE (type), &rr);
10223 /* Proceed iff the specified mode can hold the value. */
10224 if (real_identical (&rmode, &rr))
10225 return build_real (type, rmode);
10228 return NULL_TREE;
10231 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10232 number and no overflow/underflow occurred. INEXACT is true if M
10233 was not exactly calculated. TYPE is the tree type for the result.
10234 This function assumes that you cleared the MPFR flags and then
10235 calculated M to see if anything subsequently set a flag prior to
10236 entering this function. Return NULL_TREE if any checks fail, if
10237 FORCE_CONVERT is true, then bypass the checks. */
10239 static tree
10240 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10242 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10243 overflow/underflow occurred. If -frounding-math, proceed iff the
10244 result of calling FUNC was exact. */
10245 if (force_convert
10246 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10247 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10248 && (!flag_rounding_math || !inexact)))
10250 REAL_VALUE_TYPE re, im;
10252 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10253 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10254 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10255 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10256 but the mpfr_t is not, then we underflowed in the
10257 conversion. */
10258 if (force_convert
10259 || (real_isfinite (&re) && real_isfinite (&im)
10260 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10261 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10263 REAL_VALUE_TYPE re_mode, im_mode;
10265 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10266 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10267 /* Proceed iff the specified mode can hold the value. */
10268 if (force_convert
10269 || (real_identical (&re_mode, &re)
10270 && real_identical (&im_mode, &im)))
10271 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10272 build_real (TREE_TYPE (type), im_mode));
10275 return NULL_TREE;
10278 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10279 the pointer *(ARG_QUO) and return the result. The type is taken
10280 from the type of ARG0 and is used for setting the precision of the
10281 calculation and results. */
10283 static tree
10284 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10286 tree const type = TREE_TYPE (arg0);
10287 tree result = NULL_TREE;
10289 STRIP_NOPS (arg0);
10290 STRIP_NOPS (arg1);
10292 /* To proceed, MPFR must exactly represent the target floating point
10293 format, which only happens when the target base equals two. */
10294 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10295 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10296 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10298 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10299 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10301 if (real_isfinite (ra0) && real_isfinite (ra1))
10303 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10304 const int prec = fmt->p;
10305 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10306 tree result_rem;
10307 long integer_quo;
10308 mpfr_t m0, m1;
10310 mpfr_inits2 (prec, m0, m1, NULL);
10311 mpfr_from_real (m0, ra0, GMP_RNDN);
10312 mpfr_from_real (m1, ra1, GMP_RNDN);
10313 mpfr_clear_flags ();
10314 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10315 /* Remquo is independent of the rounding mode, so pass
10316 inexact=0 to do_mpfr_ckconv(). */
10317 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10318 mpfr_clears (m0, m1, NULL);
10319 if (result_rem)
10321 /* MPFR calculates quo in the host's long so it may
10322 return more bits in quo than the target int can hold
10323 if sizeof(host long) > sizeof(target int). This can
10324 happen even for native compilers in LP64 mode. In
10325 these cases, modulo the quo value with the largest
10326 number that the target int can hold while leaving one
10327 bit for the sign. */
10328 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10329 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10331 /* Dereference the quo pointer argument. */
10332 arg_quo = build_fold_indirect_ref (arg_quo);
10333 /* Proceed iff a valid pointer type was passed in. */
10334 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10336 /* Set the value. */
10337 tree result_quo
10338 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10339 build_int_cst (TREE_TYPE (arg_quo),
10340 integer_quo));
10341 TREE_SIDE_EFFECTS (result_quo) = 1;
10342 /* Combine the quo assignment with the rem. */
10343 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10344 result_quo, result_rem));
10349 return result;
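/* Folding sketch (illustrative, not part of GCC): with constant operands
   the MPFR evaluation above lets, e.g.,

     int q;
     double r = remquo (10.0, 3.0, &q);

   fold to a COMPOUND_EXPR assigning q = 3 and yielding r = 1.0, so no
   library call is emitted.  */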
10352 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10353 resulting value as a tree with type TYPE. The mpfr precision is
10354 set to the precision of TYPE. We assume that this mpfr function
10355 returns zero if the result could be calculated exactly within the
10356 requested precision. In addition, the integer pointer represented
10357 by ARG_SG will be dereferenced and set to the appropriate signgam
10358 (-1,1) value. */
10360 static tree
10361 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10363 tree result = NULL_TREE;
10365 STRIP_NOPS (arg);
10367 /* To proceed, MPFR must exactly represent the target floating point
10368 format, which only happens when the target base equals two. Also
10369 verify ARG is a constant and that ARG_SG is an int pointer. */
10370 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10371 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10372 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10373 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10375 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10377 /* In addition to NaN and Inf, the argument cannot be zero or a
10378 negative integer. */
10379 if (real_isfinite (ra)
10380 && ra->cl != rvc_zero
10381 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10383 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10384 const int prec = fmt->p;
10385 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10386 int inexact, sg;
10387 mpfr_t m;
10388 tree result_lg;
10390 mpfr_init2 (m, prec);
10391 mpfr_from_real (m, ra, GMP_RNDN);
10392 mpfr_clear_flags ();
10393 inexact = mpfr_lgamma (m, &sg, m, rnd);
10394 result_lg = do_mpfr_ckconv (m, type, inexact);
10395 mpfr_clear (m);
10396 if (result_lg)
10398 tree result_sg;
10400 /* Dereference the arg_sg pointer argument. */
10401 arg_sg = build_fold_indirect_ref (arg_sg);
10402 /* Assign the signgam value into *arg_sg. */
10403 result_sg = fold_build2 (MODIFY_EXPR,
10404 TREE_TYPE (arg_sg), arg_sg,
10405 build_int_cst (TREE_TYPE (arg_sg), sg));
10406 TREE_SIDE_EFFECTS (result_sg) = 1;
10407 /* Combine the signgam assignment with the lgamma result. */
10408 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10409 result_sg, result_lg));
10414 return result;
10417 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10418 mpc function FUNC on it and return the resulting value as a tree
10419 with type TYPE. The mpfr precision is set to the precision of
10420 TYPE. We assume that function FUNC returns zero if the result
10421 could be calculated exactly within the requested precision. If
10422 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10423 in the arguments and/or results. */
10425 tree
10426 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10427 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10429 tree result = NULL_TREE;
10431 STRIP_NOPS (arg0);
10432 STRIP_NOPS (arg1);
10434 /* To proceed, MPFR must exactly represent the target floating point
10435 format, which only happens when the target base equals two. */
10436 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10438 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10439 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10440 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10442 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10443 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10444 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10445 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10447 if (do_nonfinite
10448 || (real_isfinite (re0) && real_isfinite (im0)
10449 && real_isfinite (re1) && real_isfinite (im1)))
10451 const struct real_format *const fmt =
10452 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10453 const int prec = fmt->p;
10454 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10455 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10456 int inexact;
10457 mpc_t m0, m1;
10459 mpc_init2 (m0, prec);
10460 mpc_init2 (m1, prec);
10461 mpfr_from_real (mpc_realref (m0), re0, rnd);
10462 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10463 mpfr_from_real (mpc_realref (m1), re1, rnd);
10464 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10465 mpfr_clear_flags ();
10466 inexact = func (m0, m0, m1, crnd);
10467 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10468 mpc_clear (m0);
10469 mpc_clear (m1);
10473 return result;
10476 /* A wrapper function for builtin folding that prevents warnings for
10477 "statement without effect" and the like, caused by removing the
10478 call node earlier than the warning is generated. */
10480 tree
10481 fold_call_stmt (gcall *stmt, bool ignore)
10483 tree ret = NULL_TREE;
10484 tree fndecl = gimple_call_fndecl (stmt);
10485 location_t loc = gimple_location (stmt);
10486 if (fndecl
10487 && TREE_CODE (fndecl) == FUNCTION_DECL
10488 && DECL_BUILT_IN (fndecl)
10489 && !gimple_call_va_arg_pack_p (stmt))
10491 int nargs = gimple_call_num_args (stmt);
10492 tree *args = (nargs > 0
10493 ? gimple_call_arg_ptr (stmt, 0)
10494 : &error_mark_node);
10496 if (avoid_folding_inline_builtin (fndecl))
10497 return NULL_TREE;
10498 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10500 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10502 else
10504 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10505 if (ret)
10507 /* Propagate location information from original call to
10508 expansion of builtin. Otherwise things like
10509 maybe_emit_chk_warning, that operate on the expansion
10510 of a builtin, will use the wrong location information. */
10511 if (gimple_has_location (stmt))
10513 tree realret = ret;
10514 if (TREE_CODE (ret) == NOP_EXPR)
10515 realret = TREE_OPERAND (ret, 0);
10516 if (CAN_HAVE_LOCATION_P (realret)
10517 && !EXPR_HAS_LOCATION (realret))
10518 SET_EXPR_LOCATION (realret, loc);
10519 return realret;
10521 return ret;
10525 return NULL_TREE;
10528 /* Look up the function in builtin_decl that corresponds to DECL
10529 and set ASMSPEC as its user assembler name. DECL must be a
10530 function decl that declares a builtin. */
10532 void
10533 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10535 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10536 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10537 && asmspec != 0);
10539 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10540 set_user_assembler_name (builtin, asmspec);
10542 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10543 && INT_TYPE_SIZE < BITS_PER_WORD)
10545 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10546 set_user_assembler_libfunc ("ffs", asmspec);
10547 set_optab_libfunc (ffs_optab, mode, "ffs");
10551 /* Return true if DECL is a builtin that expands to a constant or similarly
10552 simple code. */
10553 bool
10554 is_simple_builtin (tree decl)
10556 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10557 switch (DECL_FUNCTION_CODE (decl))
10559 /* Builtins that expand to constants. */
10560 case BUILT_IN_CONSTANT_P:
10561 case BUILT_IN_EXPECT:
10562 case BUILT_IN_OBJECT_SIZE:
10563 case BUILT_IN_UNREACHABLE:
10564 /* Simple register moves or loads from stack. */
10565 case BUILT_IN_ASSUME_ALIGNED:
10566 case BUILT_IN_RETURN_ADDRESS:
10567 case BUILT_IN_EXTRACT_RETURN_ADDR:
10568 case BUILT_IN_FROB_RETURN_ADDR:
10569 case BUILT_IN_RETURN:
10570 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10571 case BUILT_IN_FRAME_ADDRESS:
10572 case BUILT_IN_VA_END:
10573 case BUILT_IN_STACK_SAVE:
10574 case BUILT_IN_STACK_RESTORE:
10575 /* Exception state returns or moves registers around. */
10576 case BUILT_IN_EH_FILTER:
10577 case BUILT_IN_EH_POINTER:
10578 case BUILT_IN_EH_COPY_VALUES:
10579 return true;
10581 default:
10582 return false;
10585 return false;
10588 /* Return true if DECL is a builtin that is not expensive, i.e. one that
10589 is most probably expanded inline into reasonably simple code.  This is a
10590 superset of is_simple_builtin. */
10591 bool
10592 is_inexpensive_builtin (tree decl)
10594 if (!decl)
10595 return false;
10596 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10597 return true;
10598 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10599 switch (DECL_FUNCTION_CODE (decl))
10601 case BUILT_IN_ABS:
10602 CASE_BUILT_IN_ALLOCA:
10603 case BUILT_IN_BSWAP16:
10604 case BUILT_IN_BSWAP32:
10605 case BUILT_IN_BSWAP64:
10606 case BUILT_IN_CLZ:
10607 case BUILT_IN_CLZIMAX:
10608 case BUILT_IN_CLZL:
10609 case BUILT_IN_CLZLL:
10610 case BUILT_IN_CTZ:
10611 case BUILT_IN_CTZIMAX:
10612 case BUILT_IN_CTZL:
10613 case BUILT_IN_CTZLL:
10614 case BUILT_IN_FFS:
10615 case BUILT_IN_FFSIMAX:
10616 case BUILT_IN_FFSL:
10617 case BUILT_IN_FFSLL:
10618 case BUILT_IN_IMAXABS:
10619 case BUILT_IN_FINITE:
10620 case BUILT_IN_FINITEF:
10621 case BUILT_IN_FINITEL:
10622 case BUILT_IN_FINITED32:
10623 case BUILT_IN_FINITED64:
10624 case BUILT_IN_FINITED128:
10625 case BUILT_IN_FPCLASSIFY:
10626 case BUILT_IN_ISFINITE:
10627 case BUILT_IN_ISINF_SIGN:
10628 case BUILT_IN_ISINF:
10629 case BUILT_IN_ISINFF:
10630 case BUILT_IN_ISINFL:
10631 case BUILT_IN_ISINFD32:
10632 case BUILT_IN_ISINFD64:
10633 case BUILT_IN_ISINFD128:
10634 case BUILT_IN_ISNAN:
10635 case BUILT_IN_ISNANF:
10636 case BUILT_IN_ISNANL:
10637 case BUILT_IN_ISNAND32:
10638 case BUILT_IN_ISNAND64:
10639 case BUILT_IN_ISNAND128:
10640 case BUILT_IN_ISNORMAL:
10641 case BUILT_IN_ISGREATER:
10642 case BUILT_IN_ISGREATEREQUAL:
10643 case BUILT_IN_ISLESS:
10644 case BUILT_IN_ISLESSEQUAL:
10645 case BUILT_IN_ISLESSGREATER:
10646 case BUILT_IN_ISUNORDERED:
10647 case BUILT_IN_VA_ARG_PACK:
10648 case BUILT_IN_VA_ARG_PACK_LEN:
10649 case BUILT_IN_VA_COPY:
10650 case BUILT_IN_TRAP:
10651 case BUILT_IN_SAVEREGS:
10652 case BUILT_IN_POPCOUNTL:
10653 case BUILT_IN_POPCOUNTLL:
10654 case BUILT_IN_POPCOUNTIMAX:
10655 case BUILT_IN_POPCOUNT:
10656 case BUILT_IN_PARITYL:
10657 case BUILT_IN_PARITYLL:
10658 case BUILT_IN_PARITYIMAX:
10659 case BUILT_IN_PARITY:
10660 case BUILT_IN_LABS:
10661 case BUILT_IN_LLABS:
10662 case BUILT_IN_PREFETCH:
10663 case BUILT_IN_ACC_ON_DEVICE:
10664 return true;
10666 default:
10667 return is_simple_builtin (decl);
10670 return false;
10673 /* Return true if T is a constant and the value cast to a target char
10674 can be represented by a host char.
10675 Store the cast char constant in *P if so. */
10677 bool
10678 target_char_cst_p (tree t, char *p)
10680 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10681 return false;
10683 *p = (char)tree_to_uhwi (t);
10684 return true;
10687 /* Return the maximum object size. */
10689 tree
10690 max_object_size (void)
10692 /* To do: Make this a configurable parameter. */
10693 return TYPE_MAX_VALUE (ptrdiff_type_node);