/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

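/* A worked example of the contract above: if get_object_alignment_1 sets
   *ALIGNP to 64 and *BITPOSP to 16, the object's address is known to have
   the form 64*k + 16 in bits, so the plain alignment that can then be
   guaranteed is least_bit_hwi (16) == 16 bits, i.e. 2 bytes.  */
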
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

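/* For example, with ELTSIZE == 4 the wide string L"a" occupies the bytes
   { 0x61, 0, 0, 0, 0, 0, 0, 0 } on a little-endian target: element 0
   compares unequal to "\0\0\0\0", element 1 is all zeros, so
   string_length returns 1.  */
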
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

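/* For example, for the string constant "hello" with a known byte offset
   of 2, ELTOFF is 2, the scan starts at the first 'l' and c_strlen
   returns ssize_int (3).  */
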
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

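/* For example, reading "abc" in a 32-bit integer mode on a little-endian
   target puts 'a' (0x61) in bits 0-7, 'b' (0x62) in bits 8-15 and 'c'
   (0x63) in bits 16-23; once the terminating nul is reached CH stays 0,
   so the result is the constant 0x00636261.  */
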
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

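/* For example, __builtin_return_address (0) takes the COUNT == 0 path:
   no frames are scanned and the return address is read relative to the
   current frame.  Each additional level follows the dynamic chain one
   more time before the final load.  */
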
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode,
						       receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

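/* The buffer set up above therefore has the layout:
     word 0:	     frame pointer (targetm.builtin_setjmp_frame_value)
     word 1:	     address of RECEIVER_LABEL
     word 2 and up:  stack save area in SA_MODE
   expand_builtin_longjmp below reads the words back in the same order.  */
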
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

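/* For example, validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly one pointer argument followed by one
   integer argument, whereas a final 0 instead of VOID_TYPE would also
   accept arbitrary trailing arguments.  */
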
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>;"
	       " using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

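/* At the source level the above corresponds to calls such as
   __builtin_prefetch (p, 0, 3): prefetch P for reading with maximal
   temporal locality, which are also the defaults used when the two
   optional arguments are omitted.  */
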
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0,
					  0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

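/* For example, on a target where Pmode is 4 bytes wide and the first
   argument register has DImode (8 bytes), SIZE starts at 4 for the
   arg-pointer slot, is rounded up to 8 for the register's alignment,
   and ends up as 16 once the register itself is added.  */
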
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

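/* The block built above therefore has the layout:
     offset 0:			     incoming arg pointer
     offset GET_MODE_SIZE (Pmode):   structure value address, if used
     afterwards:		     each argument register in turn, each
				     aligned to its natural alignment.
   expand_builtin_apply below walks the block with the same layout.  */
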
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1575 /* Perform an untyped call and save the state required to perform an
1576 untyped return of whatever value was returned by the given function. */
1578 static rtx
1579 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1581 int size, align, regno;
1582 machine_mode mode;
1583 rtx incoming_args, result, reg, dest, src;
1584 rtx_call_insn *call_insn;
1585 rtx old_stack_level = 0;
1586 rtx call_fusage = 0;
1587 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1589 arguments = convert_memory_address (Pmode, arguments);
1591 /* Create a block where the return registers can be saved. */
1592 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1594 /* Fetch the arg pointer from the ARGUMENTS block. */
1595 incoming_args = gen_reg_rtx (Pmode);
1596 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1597 if (!STACK_GROWS_DOWNWARD)
1598 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1599 incoming_args, 0, OPTAB_LIB_WIDEN);
1601 /* Push a new argument block and copy the arguments. Do not allow
1602 the (potential) memcpy call below to interfere with our stack
1603 manipulations. */
1604 do_pending_stack_adjust ();
1605 NO_DEFER_POP;
1607 /* Save the stack with nonlocal if available. */
1608 if (targetm.have_save_stack_nonlocal ())
1609 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1610 else
1611 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1613 /* Allocate a block of memory onto the stack and copy the memory
1614 arguments to the outgoing arguments address. We can pass TRUE
1615 as the 4th argument because we just saved the stack pointer
1616 and will restore it right after the call. */
1617 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1619 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1620 may have already set current_function_calls_alloca to true.
1621 current_function_calls_alloca won't be set if argsize is zero,
1622 so we have to guarantee need_drap is true here. */
1623 if (SUPPORTS_STACK_ALIGNMENT)
1624 crtl->need_drap = true;
1626 dest = virtual_outgoing_args_rtx;
1627 if (!STACK_GROWS_DOWNWARD)
1629 if (CONST_INT_P (argsize))
1630 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1631 else
1632 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1634 dest = gen_rtx_MEM (BLKmode, dest);
1635 set_mem_align (dest, PARM_BOUNDARY);
1636 src = gen_rtx_MEM (BLKmode, incoming_args);
1637 set_mem_align (src, PARM_BOUNDARY);
1638 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1640 /* Refer to the argument block. */
1641 apply_args_size ();
1642 arguments = gen_rtx_MEM (BLKmode, arguments);
1643 set_mem_align (arguments, PARM_BOUNDARY);
1645 /* Walk past the arg-pointer and structure value address. */
1646 size = GET_MODE_SIZE (Pmode);
1647 if (struct_value)
1648 size += GET_MODE_SIZE (Pmode);
1650 /* Restore each of the registers previously saved. Make USE insns
1651 for each of these registers for use in making the call. */
1652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1653 if ((mode = apply_args_mode[regno]) != VOIDmode)
1655 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1656 if (size % align != 0)
1657 size = CEIL (size, align) * align;
1658 reg = gen_rtx_REG (mode, regno);
1659 emit_move_insn (reg, adjust_address (arguments, mode, size));
1660 use_reg (&call_fusage, reg);
1661 size += GET_MODE_SIZE (mode);
1664 /* Restore the structure value address unless this is passed as an
1665 "invisible" first argument. */
1666 size = GET_MODE_SIZE (Pmode);
1667 if (struct_value)
1669 rtx value = gen_reg_rtx (Pmode);
1670 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1671 emit_move_insn (struct_value, value);
1672 if (REG_P (struct_value))
1673 use_reg (&call_fusage, struct_value);
1674 size += GET_MODE_SIZE (Pmode);
1677 /* All arguments and registers used for the call are set up by now! */
1678 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1680 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing is needed,
1681 and we don't want to load it into a register as an optimization,
1682 because prepare_call_address already did it if it should be done. */
1683 if (GET_CODE (function) != SYMBOL_REF)
1684 function = memory_address (FUNCTION_MODE, function);
1686 /* Generate the actual call instruction and save the return value. */
1687 if (targetm.have_untyped_call ())
1689 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1690 emit_call_insn (targetm.gen_untyped_call (mem, result,
1691 result_vector (1, result)));
1693 else if (targetm.have_call_value ())
1695 rtx valreg = 0;
1697 /* Locate the unique return register. It is not possible to
1698 express a call that sets more than one return register using
1699 call_value; use untyped_call for that. In fact, untyped_call
1700 only needs to save the return registers in the given block. */
1701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1702 if ((mode = apply_result_mode[regno]) != VOIDmode)
1704 gcc_assert (!valreg); /* have_untyped_call required. */
1706 valreg = gen_rtx_REG (mode, regno);
1709 emit_insn (targetm.gen_call_value (valreg,
1710 gen_rtx_MEM (FUNCTION_MODE, function),
1711 const0_rtx, NULL_RTX, const0_rtx));
1713 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1715 else
1716 gcc_unreachable ();
1718 /* Find the CALL insn we just emitted, and attach the register usage
1719 information. */
1720 call_insn = last_call_insn ();
1721 add_function_usage_to (call_insn, call_fusage);
1723 /* Restore the stack. */
1724 if (targetm.have_save_stack_nonlocal ())
1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1726 else
1727 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1728 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1730 OK_DEFER_POP;
1732 /* Return the address of the result block. */
1733 result = copy_addr_to_reg (XEXP (result, 0));
1734 return convert_memory_address (ptr_mode, result);
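/* An illustrative usage sketch (TARGET_FN is a placeholder and 64 is an
   arbitrary bound on the number of stack argument bytes to copy):

     double target_fn (int a, double b) { return a + b; }

     double forwarder (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) &target_fn, args, 64);
       __builtin_return (res);
     }

   expand_builtin_apply emits the untyped call and returns the address
   of the block holding the return registers; expand_builtin_return,
   next, reloads those registers and performs the naked return.  */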
1737 /* Perform an untyped return. */
1739 static void
1740 expand_builtin_return (rtx result)
1742 int size, align, regno;
1743 machine_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1747 result = convert_memory_address (Pmode, result);
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1752 if (targetm.have_untyped_return ())
1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1756 emit_barrier ();
1757 return;
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1781 /* Return whatever values were restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1788 static enum type_class
1789 type_to_class (tree type)
1791 switch (TREE_CODE (type))
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1814 /* Expand a call EXP to __builtin_classify_type. */
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
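/* For example:

     __builtin_classify_type (42)          returns integer_type_class
     __builtin_classify_type (3.14)        returns real_type_class
     __builtin_classify_type ((void *) 0)  returns pointer_type_class

   and a call with no argument yields no_type_class; the numeric values
   of these classes come from enum type_class in typeclass.h.  */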
1824 /* This helper macro, meant to be used in mathfn_built_in below, determines
1825 which among a set of builtin math functions is appropriate for a given type
1826 mode. The `F' (float) and `L' (long double) are automatically generated
1827 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1828 types, there are additional types that are considered with 'F32', 'F64',
1829 'F128', etc. suffixes. */
1830 #define CASE_MATHFN(MATHFN) \
1831 CASE_CFN_##MATHFN: \
1832 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1833 fcodel = BUILT_IN_##MATHFN##L ; break;
1834 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1835 types. */
1836 #define CASE_MATHFN_FLOATN(MATHFN) \
1837 CASE_CFN_##MATHFN: \
1838 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1839 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1840 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1841 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1842 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1843 break;
1844 /* Similar to above, but appends _R after any F/L suffix. */
1845 #define CASE_MATHFN_REENT(MATHFN) \
1846 case CFN_BUILT_IN_##MATHFN##_R: \
1847 case CFN_BUILT_IN_##MATHFN##F_R: \
1848 case CFN_BUILT_IN_##MATHFN##L_R: \
1849 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1850 fcodel = BUILT_IN_##MATHFN##L_R ; break;
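/* As an illustration, CASE_MATHFN (SQRT) expands to roughly:

     case CFN_SQRT:
     case CFN_BUILT_IN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   since CASE_CFN_SQRT covers both the internal-function and built-in
   variants of the combined function code.  */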
1852 /* Return a function equivalent to FN but operating on floating-point
1853 values of type TYPE, or END_BUILTINS if no such function exists.
1854 This is purely an operation on function codes; it does not guarantee
1855 that the target actually has an implementation of the function. */
1857 static built_in_function
1858 mathfn_built_in_2 (tree type, combined_fn fn)
1860 tree mtype;
1861 built_in_function fcode, fcodef, fcodel;
1862 built_in_function fcodef16 = END_BUILTINS;
1863 built_in_function fcodef32 = END_BUILTINS;
1864 built_in_function fcodef64 = END_BUILTINS;
1865 built_in_function fcodef128 = END_BUILTINS;
1866 built_in_function fcodef32x = END_BUILTINS;
1867 built_in_function fcodef64x = END_BUILTINS;
1868 built_in_function fcodef128x = END_BUILTINS;
1870 switch (fn)
1872 CASE_MATHFN (ACOS)
1873 CASE_MATHFN (ACOSH)
1874 CASE_MATHFN (ASIN)
1875 CASE_MATHFN (ASINH)
1876 CASE_MATHFN (ATAN)
1877 CASE_MATHFN (ATAN2)
1878 CASE_MATHFN (ATANH)
1879 CASE_MATHFN (CBRT)
1880 CASE_MATHFN_FLOATN (CEIL)
1881 CASE_MATHFN (CEXPI)
1882 CASE_MATHFN_FLOATN (COPYSIGN)
1883 CASE_MATHFN (COS)
1884 CASE_MATHFN (COSH)
1885 CASE_MATHFN (DREM)
1886 CASE_MATHFN (ERF)
1887 CASE_MATHFN (ERFC)
1888 CASE_MATHFN (EXP)
1889 CASE_MATHFN (EXP10)
1890 CASE_MATHFN (EXP2)
1891 CASE_MATHFN (EXPM1)
1892 CASE_MATHFN (FABS)
1893 CASE_MATHFN (FDIM)
1894 CASE_MATHFN_FLOATN (FLOOR)
1895 CASE_MATHFN_FLOATN (FMA)
1896 CASE_MATHFN_FLOATN (FMAX)
1897 CASE_MATHFN_FLOATN (FMIN)
1898 CASE_MATHFN (FMOD)
1899 CASE_MATHFN (FREXP)
1900 CASE_MATHFN (GAMMA)
1901 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1902 CASE_MATHFN (HUGE_VAL)
1903 CASE_MATHFN (HYPOT)
1904 CASE_MATHFN (ILOGB)
1905 CASE_MATHFN (ICEIL)
1906 CASE_MATHFN (IFLOOR)
1907 CASE_MATHFN (INF)
1908 CASE_MATHFN (IRINT)
1909 CASE_MATHFN (IROUND)
1910 CASE_MATHFN (ISINF)
1911 CASE_MATHFN (J0)
1912 CASE_MATHFN (J1)
1913 CASE_MATHFN (JN)
1914 CASE_MATHFN (LCEIL)
1915 CASE_MATHFN (LDEXP)
1916 CASE_MATHFN (LFLOOR)
1917 CASE_MATHFN (LGAMMA)
1918 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1919 CASE_MATHFN (LLCEIL)
1920 CASE_MATHFN (LLFLOOR)
1921 CASE_MATHFN (LLRINT)
1922 CASE_MATHFN (LLROUND)
1923 CASE_MATHFN (LOG)
1924 CASE_MATHFN (LOG10)
1925 CASE_MATHFN (LOG1P)
1926 CASE_MATHFN (LOG2)
1927 CASE_MATHFN (LOGB)
1928 CASE_MATHFN (LRINT)
1929 CASE_MATHFN (LROUND)
1930 CASE_MATHFN (MODF)
1931 CASE_MATHFN (NAN)
1932 CASE_MATHFN (NANS)
1933 CASE_MATHFN_FLOATN (NEARBYINT)
1934 CASE_MATHFN (NEXTAFTER)
1935 CASE_MATHFN (NEXTTOWARD)
1936 CASE_MATHFN (POW)
1937 CASE_MATHFN (POWI)
1938 CASE_MATHFN (POW10)
1939 CASE_MATHFN (REMAINDER)
1940 CASE_MATHFN (REMQUO)
1941 CASE_MATHFN_FLOATN (RINT)
1942 CASE_MATHFN_FLOATN (ROUND)
1943 CASE_MATHFN (SCALB)
1944 CASE_MATHFN (SCALBLN)
1945 CASE_MATHFN (SCALBN)
1946 CASE_MATHFN (SIGNBIT)
1947 CASE_MATHFN (SIGNIFICAND)
1948 CASE_MATHFN (SIN)
1949 CASE_MATHFN (SINCOS)
1950 CASE_MATHFN (SINH)
1951 CASE_MATHFN_FLOATN (SQRT)
1952 CASE_MATHFN (TAN)
1953 CASE_MATHFN (TANH)
1954 CASE_MATHFN (TGAMMA)
1955 CASE_MATHFN_FLOATN (TRUNC)
1956 CASE_MATHFN (Y0)
1957 CASE_MATHFN (Y1)
1958 CASE_MATHFN (YN)
1960 default:
1961 return END_BUILTINS;
1964 mtype = TYPE_MAIN_VARIANT (type);
1965 if (mtype == double_type_node)
1966 return fcode;
1967 else if (mtype == float_type_node)
1968 return fcodef;
1969 else if (mtype == long_double_type_node)
1970 return fcodel;
1971 else if (mtype == float16_type_node)
1972 return fcodef16;
1973 else if (mtype == float32_type_node)
1974 return fcodef32;
1975 else if (mtype == float64_type_node)
1976 return fcodef64;
1977 else if (mtype == float128_type_node)
1978 return fcodef128;
1979 else if (mtype == float32x_type_node)
1980 return fcodef32x;
1981 else if (mtype == float64x_type_node)
1982 return fcodef64x;
1983 else if (mtype == float128x_type_node)
1984 return fcodef128x;
1985 else
1986 return END_BUILTINS;
1989 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1990 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1991 otherwise use the explicit declaration. If we can't do the conversion,
1992 return null. */
1994 static tree
1995 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1997 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1998 if (fcode2 == END_BUILTINS)
1999 return NULL_TREE;
2001 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2002 return NULL_TREE;
2004 return builtin_decl_explicit (fcode2);
2007 /* Like mathfn_built_in_1, but always use the implicit array. */
2009 tree
2010 mathfn_built_in (tree type, combined_fn fn)
2012 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2015 /* Like mathfn_built_in_1, but take a built_in_function and
2016 always use the implicit array. */
2018 tree
2019 mathfn_built_in (tree type, enum built_in_function fn)
2021 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
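/* Usage sketch: mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   returns the decl for BUILT_IN_SQRTF when that builtin is implicitly
   available, mathfn_built_in (long_double_type_node, CFN_SQRT) returns
   the decl for BUILT_IN_SQRTL, and an unsupported type yields
   NULL_TREE.  */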
2024 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2025 return its code, otherwise return IFN_LAST. Note that this function
2026 only tests whether the function is defined in internals.def, not whether
2027 it is actually available on the target. */
2029 internal_fn
2030 associated_internal_fn (tree fndecl)
2032 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2033 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2038 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2039 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2040 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2041 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2042 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2043 #include "internal-fn.def"
2045 CASE_FLT_FN (BUILT_IN_POW10):
2046 return IFN_EXP10;
2048 CASE_FLT_FN (BUILT_IN_DREM):
2049 return IFN_REMAINDER;
2051 CASE_FLT_FN (BUILT_IN_SCALBN):
2052 CASE_FLT_FN (BUILT_IN_SCALBLN):
2053 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2054 return IFN_LDEXP;
2055 return IFN_LAST;
2057 default:
2058 return IFN_LAST;
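/* For example, BUILT_IN_SQRT, BUILT_IN_SQRTF and BUILT_IN_SQRTL all map
   to IFN_SQRT here, and BUILT_IN_POW10 is canonicalized to IFN_EXP10.
   Whether the internal function is usable on the current target is a
   separate question, answered by replacement_internal_fn below.  */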
2062 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2063 on the current target by a call to an internal function, return the
2064 code of that internal function, otherwise return IFN_LAST. The caller
2065 is responsible for ensuring that any side-effects of the built-in
2066 call are dealt with correctly. E.g. if CALL sets errno, the caller
2067 must decide that the errno result isn't needed or make it available
2068 in some other way. */
2070 internal_fn
2071 replacement_internal_fn (gcall *call)
2073 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2075 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2076 if (ifn != IFN_LAST)
2078 tree_pair types = direct_internal_fn_types (ifn, call);
2079 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2080 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2081 return ifn;
2084 return IFN_LAST;
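/* A sketch of typical use from a GIMPLE pass, assuming STMT is a
   gcall * for a unary builtin whose errno result is known to be unused,
   ARG names its single argument, and GSI is the statement iterator:

     internal_fn ifn = replacement_internal_fn (stmt);
     if (ifn != IFN_LAST)
       {
         gcall *new_stmt = gimple_build_call_internal (ifn, 1, arg);
         gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
         gsi_replace (&gsi, new_stmt, true);
       }
*/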
2087 /* Expand a call to one of the builtin ternary math functions (fma).
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2094 static rtx
2095 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2097 optab builtin_optab;
2098 rtx op0, op1, op2, result;
2099 rtx_insn *insns;
2100 tree fndecl = get_callee_fndecl (exp);
2101 tree arg0, arg1, arg2;
2102 machine_mode mode;
2104 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2105 return NULL_RTX;
2107 arg0 = CALL_EXPR_ARG (exp, 0);
2108 arg1 = CALL_EXPR_ARG (exp, 1);
2109 arg2 = CALL_EXPR_ARG (exp, 2);
2111 switch (DECL_FUNCTION_CODE (fndecl))
2113 CASE_FLT_FN (BUILT_IN_FMA):
2114 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2115 builtin_optab = fma_optab; break;
2116 default:
2117 gcc_unreachable ();
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2123 /* Before working hard, check whether the instruction is available. */
2124 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2125 return NULL_RTX;
2127 result = gen_reg_rtx (mode);
2129 /* Always stabilize the argument list. */
2130 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2131 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2132 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2134 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2135 op1 = expand_normal (arg1);
2136 op2 = expand_normal (arg2);
2138 start_sequence ();
2140 /* Compute into RESULT.
2141 Set RESULT to wherever the result comes back. */
2142 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2143 result, 0);
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and emit a call to the library function
2147 with the stabilized argument list. */
2148 if (result == 0)
2150 end_sequence ();
2151 return expand_call (exp, target, target == const0_rtx);
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2156 end_sequence ();
2157 emit_insn (insns);
2159 return result;
2162 /* Expand a call to the builtin sin and cos math functions.
2163 Return NULL_RTX if a normal call should be emitted rather than expanding the
2164 function in-line. EXP is the expression that is a call to the builtin
2165 function; if convenient, the result should be placed in TARGET.
2166 SUBTARGET may be used as the target for computing one of EXP's
2167 operands. */
2169 static rtx
2170 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2172 optab builtin_optab;
2173 rtx op0;
2174 rtx_insn *insns;
2175 tree fndecl = get_callee_fndecl (exp);
2176 machine_mode mode;
2177 tree arg;
2179 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2180 return NULL_RTX;
2182 arg = CALL_EXPR_ARG (exp, 0);
2184 switch (DECL_FUNCTION_CODE (fndecl))
2186 CASE_FLT_FN (BUILT_IN_SIN):
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 builtin_optab = sincos_optab; break;
2189 default:
2190 gcc_unreachable ();
2193 /* Make a suitable register to place result in. */
2194 mode = TYPE_MODE (TREE_TYPE (exp));
2196 /* Check if the sincos insn is available; otherwise fall back
2197 to the sin or cos insn. */
2198 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_SIN):
2202 builtin_optab = sin_optab; break;
2203 CASE_FLT_FN (BUILT_IN_COS):
2204 builtin_optab = cos_optab; break;
2205 default:
2206 gcc_unreachable ();
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2212 rtx result = gen_reg_rtx (mode);
2214 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2215 need to expand the argument again. This way, we will not perform
2216 side-effects more than once. */
2217 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2219 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2221 start_sequence ();
2223 /* Compute into RESULT.
2224 Set RESULT to wherever the result comes back. */
2225 if (builtin_optab == sincos_optab)
2227 int ok;
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_SIN):
2232 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2233 break;
2234 CASE_FLT_FN (BUILT_IN_COS):
2235 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2236 break;
2237 default:
2238 gcc_unreachable ();
2240 gcc_assert (ok);
2242 else
2243 result = expand_unop (mode, builtin_optab, op0, result, 0);
2245 if (result != 0)
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2251 return result;
2254 /* If we were unable to expand via the builtin, stop the sequence
2255 (without outputting the insns) and emit a call to the library function
2256 with the stabilized argument list. */
2257 end_sequence ();
2260 return expand_call (exp, target, target == const0_rtx);
2263 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2264 return an RTL instruction code that implements the functionality.
2265 If that isn't possible or available return CODE_FOR_nothing. */
2267 static enum insn_code
2268 interclass_mathfn_icode (tree arg, tree fndecl)
2270 bool errno_set = false;
2271 optab builtin_optab = unknown_optab;
2272 machine_mode mode;
2274 switch (DECL_FUNCTION_CODE (fndecl))
2276 CASE_FLT_FN (BUILT_IN_ILOGB):
2277 errno_set = true; builtin_optab = ilogb_optab; break;
2278 CASE_FLT_FN (BUILT_IN_ISINF):
2279 builtin_optab = isinf_optab; break;
2280 case BUILT_IN_ISNORMAL:
2281 case BUILT_IN_ISFINITE:
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 case BUILT_IN_FINITED32:
2284 case BUILT_IN_FINITED64:
2285 case BUILT_IN_FINITED128:
2286 case BUILT_IN_ISINFD32:
2287 case BUILT_IN_ISINFD64:
2288 case BUILT_IN_ISINFD128:
2289 /* These builtins have no optabs (yet). */
2290 break;
2291 default:
2292 gcc_unreachable ();
2295 /* There's no easy way to detect the case we need to set EDOM. */
2296 if (flag_errno_math && errno_set)
2297 return CODE_FOR_nothing;
2299 /* Optab mode depends on the mode of the input argument. */
2300 mode = TYPE_MODE (TREE_TYPE (arg));
2302 if (builtin_optab)
2303 return optab_handler (builtin_optab, mode);
2304 return CODE_FOR_nothing;
2307 /* Expand a call to one of the builtin math functions that operate on
2308 a floating point argument and output an integer result (ilogb, isinf,
2309 isnan, etc).
2310 Return 0 if a normal call should be emitted rather than expanding the
2311 function in-line. EXP is the expression that is a call to the builtin
2312 function; if convenient, the result should be placed in TARGET. */
2314 static rtx
2315 expand_builtin_interclass_mathfn (tree exp, rtx target)
2317 enum insn_code icode = CODE_FOR_nothing;
2318 rtx op0;
2319 tree fndecl = get_callee_fndecl (exp);
2320 machine_mode mode;
2321 tree arg;
2323 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2326 arg = CALL_EXPR_ARG (exp, 0);
2327 icode = interclass_mathfn_icode (arg, fndecl);
2328 mode = TYPE_MODE (TREE_TYPE (arg));
2330 if (icode != CODE_FOR_nothing)
2332 struct expand_operand ops[1];
2333 rtx_insn *last = get_last_insn ();
2334 tree orig_arg = arg;
2336 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2337 need to expand the argument again. This way, we will not perform
2338 side-effects more than once. */
2339 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2341 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2343 if (mode != GET_MODE (op0))
2344 op0 = convert_to_mode (mode, op0, 0);
2346 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2347 if (maybe_legitimize_operands (icode, 0, 1, ops)
2348 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2349 return ops[0].value;
2351 delete_insns_since (last);
2352 CALL_EXPR_ARG (exp, 0) = orig_arg;
2355 return NULL_RTX;
2358 /* Expand a call to the builtin sincos math function.
2359 Return NULL_RTX if a normal call should be emitted rather than expanding the
2360 function in-line. EXP is the expression that is a call to the builtin
2361 function. */
2363 static rtx
2364 expand_builtin_sincos (tree exp)
2366 rtx op0, op1, op2, target1, target2;
2367 machine_mode mode;
2368 tree arg, sinp, cosp;
2369 int result;
2370 location_t loc = EXPR_LOCATION (exp);
2371 tree alias_type, alias_off;
2373 if (!validate_arglist (exp, REAL_TYPE,
2374 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2375 return NULL_RTX;
2377 arg = CALL_EXPR_ARG (exp, 0);
2378 sinp = CALL_EXPR_ARG (exp, 1);
2379 cosp = CALL_EXPR_ARG (exp, 2);
2381 /* Make a suitable register to place result in. */
2382 mode = TYPE_MODE (TREE_TYPE (arg));
2384 /* Check if the sincos insn is available; otherwise emit the call. */
2385 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2386 return NULL_RTX;
2388 target1 = gen_reg_rtx (mode);
2389 target2 = gen_reg_rtx (mode);
2391 op0 = expand_normal (arg);
2392 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2393 alias_off = build_int_cst (alias_type, 0);
2394 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2395 sinp, alias_off));
2396 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2397 cosp, alias_off));
2399 /* Compute into target1 and target2.
2400 Set TARGET to wherever the result comes back. */
2401 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2402 gcc_assert (result);
2404 /* Move target1 and target2 to the memory locations indicated
2405 by op1 and op2. */
2406 emit_move_insn (op1, target1);
2407 emit_move_insn (op2, target2);
2409 return const0_rtx;
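/* At the source level this corresponds to, e.g.:

     double s, c;
     sincos (x, &s, &c);

   expanded as a single sincos insn, with TARGET1/TARGET2 holding the
   sin and cos results before they are stored through the user-supplied
   pointers.  */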
2412 /* Expand a call to the internal cexpi builtin to the sincos math function.
2413 EXP is the expression that is a call to the builtin function; if convenient,
2414 the result should be placed in TARGET. */
2416 static rtx
2417 expand_builtin_cexpi (tree exp, rtx target)
2419 tree fndecl = get_callee_fndecl (exp);
2420 tree arg, type;
2421 machine_mode mode;
2422 rtx op0, op1, op2;
2423 location_t loc = EXPR_LOCATION (exp);
2425 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2426 return NULL_RTX;
2428 arg = CALL_EXPR_ARG (exp, 0);
2429 type = TREE_TYPE (arg);
2430 mode = TYPE_MODE (TREE_TYPE (arg));
2432 /* Try expanding via a sincos optab, falling back to emitting a libcall
2433 to sincos or cexp. We can rely on sincos or cexp being available,
2434 because cexpi is only generated from sincos or cexp, or when either of them is known to exist. */
2435 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2437 op1 = gen_reg_rtx (mode);
2438 op2 = gen_reg_rtx (mode);
2440 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2442 /* Compute into op1 and op2. */
2443 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2445 else if (targetm.libc_has_function (function_sincos))
2447 tree call, fn = NULL_TREE;
2448 tree top1, top2;
2449 rtx op1a, op2a;
2451 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2452 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2456 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2457 else
2458 gcc_unreachable ();
2460 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2461 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2462 op1a = copy_addr_to_reg (XEXP (op1, 0));
2463 op2a = copy_addr_to_reg (XEXP (op2, 0));
2464 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2465 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2467 /* Make sure not to fold the sincos call again. */
2468 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2469 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2470 call, 3, arg, top1, top2));
2472 else
2474 tree call, fn = NULL_TREE, narg;
2475 tree ctype = build_complex_type (type);
2477 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2482 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2483 else
2484 gcc_unreachable ();
2486 /* If we don't have a decl for cexp create one. This is the
2487 friendliest fallback if the user calls __builtin_cexpi
2488 on a target without full C99 function support. */
2489 if (fn == NULL_TREE)
2491 tree fntype;
2492 const char *name = NULL;
2494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2495 name = "cexpf";
2496 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2497 name = "cexp";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2499 name = "cexpl";
2501 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2502 fn = build_fn_decl (name, fntype);
2505 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2506 build_real (type, dconst0), arg);
2508 /* Make sure not to fold the cexp call again. */
2509 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2510 return expand_expr (build_call_nary (ctype, call, 1, narg),
2511 target, VOIDmode, EXPAND_NORMAL);
2514 /* Now build the proper return type. */
2515 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2516 make_tree (TREE_TYPE (arg), op2),
2517 make_tree (TREE_TYPE (arg), op1)),
2518 target, VOIDmode, EXPAND_NORMAL);
2521 /* Conveniently construct a function call expression. FNDECL names the
2522 function to be called, N is the number of arguments, and the "..."
2523 parameters are the argument expressions. Unlike build_call_expr,
2524 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2526 static tree
2527 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2529 va_list ap;
2530 tree fntype = TREE_TYPE (fndecl);
2531 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2533 va_start (ap, n);
2534 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2535 va_end (ap);
2536 SET_EXPR_LOCATION (fn, loc);
2537 return fn;
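/* E.g. build_call_nofold_loc (loc, fndecl, 2, arg0, arg1) yields the
   bare CALL_EXPR "fndecl (arg0, arg1)" at LOC. The fallback paths
   below rely on this to avoid re-folding calls they have just decided
   not to expand inline.  */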
2540 /* Expand a call to one of the builtin rounding functions gcc defines
2541 as an extension (lfloor and lceil). As these are gcc extensions we
2542 do not need to worry about setting errno to EDOM.
2543 If expanding via the optab fails, lower the expression to (int) floor (x).
2544 EXP is the expression that is a call to the builtin function;
2545 if convenient, the result should be placed in TARGET. */
2547 static rtx
2548 expand_builtin_int_roundingfn (tree exp, rtx target)
2550 convert_optab builtin_optab;
2551 rtx op0, tmp;
2552 rtx_insn *insns;
2553 tree fndecl = get_callee_fndecl (exp);
2554 enum built_in_function fallback_fn;
2555 tree fallback_fndecl;
2556 machine_mode mode;
2557 tree arg;
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 gcc_unreachable ();
2562 arg = CALL_EXPR_ARG (exp, 0);
2564 switch (DECL_FUNCTION_CODE (fndecl))
2566 CASE_FLT_FN (BUILT_IN_ICEIL):
2567 CASE_FLT_FN (BUILT_IN_LCEIL):
2568 CASE_FLT_FN (BUILT_IN_LLCEIL):
2569 builtin_optab = lceil_optab;
2570 fallback_fn = BUILT_IN_CEIL;
2571 break;
2573 CASE_FLT_FN (BUILT_IN_IFLOOR):
2574 CASE_FLT_FN (BUILT_IN_LFLOOR):
2575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2576 builtin_optab = lfloor_optab;
2577 fallback_fn = BUILT_IN_FLOOR;
2578 break;
2580 default:
2581 gcc_unreachable ();
2584 /* Make a suitable register to place result in. */
2585 mode = TYPE_MODE (TREE_TYPE (exp));
2587 target = gen_reg_rtx (mode);
2589 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2590 need to expand the argument again. This way, we will not perform
2591 side-effects more than once. */
2592 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2594 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2596 start_sequence ();
2598 /* Compute into TARGET. */
2599 if (expand_sfix_optab (target, op0, builtin_optab))
2601 /* Output the entire sequence. */
2602 insns = get_insns ();
2603 end_sequence ();
2604 emit_insn (insns);
2605 return target;
2608 /* If we were unable to expand via the builtin, stop the sequence
2609 (without outputting the insns). */
2610 end_sequence ();
2612 /* Fall back to floating point rounding optab. */
2613 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2615 /* For non-C99 targets we may end up without a fallback fndecl here
2616 if the user called __builtin_lfloor directly. In this case emit
2617 a call to the floor/ceil variants nevertheless. This should result
2618 in the best user experience for targets without full C99 support. */
2619 if (fallback_fndecl == NULL_TREE)
2621 tree fntype;
2622 const char *name = NULL;
2624 switch (DECL_FUNCTION_CODE (fndecl))
2626 case BUILT_IN_ICEIL:
2627 case BUILT_IN_LCEIL:
2628 case BUILT_IN_LLCEIL:
2629 name = "ceil";
2630 break;
2631 case BUILT_IN_ICEILF:
2632 case BUILT_IN_LCEILF:
2633 case BUILT_IN_LLCEILF:
2634 name = "ceilf";
2635 break;
2636 case BUILT_IN_ICEILL:
2637 case BUILT_IN_LCEILL:
2638 case BUILT_IN_LLCEILL:
2639 name = "ceill";
2640 break;
2641 case BUILT_IN_IFLOOR:
2642 case BUILT_IN_LFLOOR:
2643 case BUILT_IN_LLFLOOR:
2644 name = "floor";
2645 break;
2646 case BUILT_IN_IFLOORF:
2647 case BUILT_IN_LFLOORF:
2648 case BUILT_IN_LLFLOORF:
2649 name = "floorf";
2650 break;
2651 case BUILT_IN_IFLOORL:
2652 case BUILT_IN_LFLOORL:
2653 case BUILT_IN_LLFLOORL:
2654 name = "floorl";
2655 break;
2656 default:
2657 gcc_unreachable ();
2660 fntype = build_function_type_list (TREE_TYPE (arg),
2661 TREE_TYPE (arg), NULL_TREE);
2662 fallback_fndecl = build_fn_decl (name, fntype);
2665 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2667 tmp = expand_normal (exp);
2668 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2670 /* Truncate the result of the floating point optab to integer
2671 via expand_fix (). */
2672 target = gen_reg_rtx (mode);
2673 expand_fix (target, tmp, 0);
2675 return target;
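/* Schematically, when the lceil optab is unavailable:

     long l = __builtin_lceil (x);

   is lowered to the equivalent of:

     long l = (long) ceil (x);

   with the final float-to-integer conversion emitted by expand_fix
   above.  */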
2678 /* Expand a call to one of the builtin math functions doing integer
2679 conversion (lrint).
2680 Return 0 if a normal call should be emitted rather than expanding the
2681 function in-line. EXP is the expression that is a call to the builtin
2682 function; if convenient, the result should be placed in TARGET. */
2684 static rtx
2685 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2687 convert_optab builtin_optab;
2688 rtx op0;
2689 rtx_insn *insns;
2690 tree fndecl = get_callee_fndecl (exp);
2691 tree arg;
2692 machine_mode mode;
2693 enum built_in_function fallback_fn = BUILT_IN_NONE;
2695 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2696 gcc_unreachable ();
2698 arg = CALL_EXPR_ARG (exp, 0);
2700 switch (DECL_FUNCTION_CODE (fndecl))
2702 CASE_FLT_FN (BUILT_IN_IRINT):
2703 fallback_fn = BUILT_IN_LRINT;
2704 gcc_fallthrough ();
2705 CASE_FLT_FN (BUILT_IN_LRINT):
2706 CASE_FLT_FN (BUILT_IN_LLRINT):
2707 builtin_optab = lrint_optab;
2708 break;
2710 CASE_FLT_FN (BUILT_IN_IROUND):
2711 fallback_fn = BUILT_IN_LROUND;
2712 gcc_fallthrough ();
2713 CASE_FLT_FN (BUILT_IN_LROUND):
2714 CASE_FLT_FN (BUILT_IN_LLROUND):
2715 builtin_optab = lround_optab;
2716 break;
2718 default:
2719 gcc_unreachable ();
2722 /* There's no easy way to detect the case we need to set EDOM. */
2723 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2724 return NULL_RTX;
2726 /* Make a suitable register to place result in. */
2727 mode = TYPE_MODE (TREE_TYPE (exp));
2729 /* There's no easy way to detect the case we need to set EDOM. */
2730 if (!flag_errno_math)
2732 rtx result = gen_reg_rtx (mode);
2734 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2735 need to expand the argument again. This way, we will not perform
2736 side-effects more than once. */
2737 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2739 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2741 start_sequence ();
2743 if (expand_sfix_optab (result, op0, builtin_optab))
2745 /* Output the entire sequence. */
2746 insns = get_insns ();
2747 end_sequence ();
2748 emit_insn (insns);
2749 return result;
2752 /* If we were unable to expand via the builtin, stop the sequence
2753 (without outputting the insns) and emit a call to the library function
2754 with the stabilized argument list. */
2755 end_sequence ();
2758 if (fallback_fn != BUILT_IN_NONE)
2760 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2761 targets, (int) round (x) should never be transformed into
2762 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2763 a call to lround in the hope that the target provides at least some
2764 C99 functions. This should result in the best user experience for
2765 targets without full C99 support. */
2766 tree fallback_fndecl = mathfn_built_in_1
2767 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2769 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2770 fallback_fndecl, 1, arg);
2772 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2773 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2774 return convert_to_mode (mode, target, 0);
2777 return expand_call (exp, target, target == const0_rtx);
2780 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2781 a normal call should be emitted rather than expanding the function
2782 in-line. EXP is the expression that is a call to the builtin
2783 function; if convenient, the result should be placed in TARGET. */
2785 static rtx
2786 expand_builtin_powi (tree exp, rtx target)
2788 tree arg0, arg1;
2789 rtx op0, op1;
2790 machine_mode mode;
2791 machine_mode mode2;
2793 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2794 return NULL_RTX;
2796 arg0 = CALL_EXPR_ARG (exp, 0);
2797 arg1 = CALL_EXPR_ARG (exp, 1);
2798 mode = TYPE_MODE (TREE_TYPE (exp));
2800 /* Emit a libcall to libgcc. */
2802 /* Mode of the 2nd argument must match that of an int. */
2803 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2805 if (target == NULL_RTX)
2806 target = gen_reg_rtx (mode);
2808 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2809 if (GET_MODE (op0) != mode)
2810 op0 = convert_to_mode (mode, op0, 0);
2811 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2812 if (GET_MODE (op1) != mode2)
2813 op1 = convert_to_mode (mode2, op1, 0);
2815 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2816 target, LCT_CONST, mode,
2817 op0, mode, op1, mode2);
2819 return target;
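/* Concretely, for a DFmode argument this emits a call to libgcc's
   __powidf2, so

     double r = __builtin_powi (x, n);

   becomes the equivalent of r = __powidf2 (x, n).  */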
2822 /* Expand expression EXP which is a call to the strlen builtin. Return
2823 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2824 try to get the result in TARGET, if convenient. */
2826 static rtx
2827 expand_builtin_strlen (tree exp, rtx target,
2828 machine_mode target_mode)
2830 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2831 return NULL_RTX;
2833 struct expand_operand ops[4];
2834 rtx pat;
2835 tree len;
2836 tree src = CALL_EXPR_ARG (exp, 0);
2837 rtx src_reg;
2838 rtx_insn *before_strlen;
2839 machine_mode insn_mode;
2840 enum insn_code icode = CODE_FOR_nothing;
2841 unsigned int align;
2843 /* If the length can be computed at compile-time, return it. */
2844 len = c_strlen (src, 0);
2845 if (len)
2846 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2848 /* If the length can be computed at compile-time and is constant
2849 integer, but there are side-effects in src, evaluate
2850 src for side-effects, then return len.
2851 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2852 can be optimized into: i++; x = 3; */
2853 len = c_strlen (src, 1);
2854 if (len && TREE_CODE (len) == INTEGER_CST)
2856 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2857 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2860 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2862 /* If SRC is not a pointer type, don't do this operation inline. */
2863 if (align == 0)
2864 return NULL_RTX;
2866 /* Bail out if we can't compute strlen in the right mode. */
2867 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2869 icode = optab_handler (strlen_optab, insn_mode);
2870 if (icode != CODE_FOR_nothing)
2871 break;
2873 if (insn_mode == VOIDmode)
2874 return NULL_RTX;
2876 /* Make a place to hold the source address. We will not expand
2877 the actual source until we are sure that the expansion will
2878 not fail -- there are trees that cannot be expanded twice. */
2879 src_reg = gen_reg_rtx (Pmode);
2881 /* Mark the beginning of the strlen sequence so we can emit the
2882 source operand later. */
2883 before_strlen = get_last_insn ();
2885 create_output_operand (&ops[0], target, insn_mode);
2886 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2887 create_integer_operand (&ops[2], 0);
2888 create_integer_operand (&ops[3], align);
2889 if (!maybe_expand_insn (icode, 4, ops))
2890 return NULL_RTX;
2892 /* Check to see if the argument was declared attribute nonstring
2893 and if so, issue a warning since at this point it's not known
2894 to be nul-terminated. */
2895 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2897 /* Now that we are assured of success, expand the source. */
2898 start_sequence ();
2899 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2900 if (pat != src_reg)
2902 #ifdef POINTERS_EXTEND_UNSIGNED
2903 if (GET_MODE (pat) != Pmode)
2904 pat = convert_to_mode (Pmode, pat,
2905 POINTERS_EXTEND_UNSIGNED);
2906 #endif
2907 emit_move_insn (src_reg, pat);
2909 pat = get_insns ();
2910 end_sequence ();
2912 if (before_strlen)
2913 emit_insn_after (pat, before_strlen);
2914 else
2915 emit_insn_before (pat, get_insns ());
2917 /* Return the value in the proper mode for this function. */
2918 if (GET_MODE (ops[0].value) == target_mode)
2919 target = ops[0].value;
2920 else if (target != 0)
2921 convert_move (target, ops[0].value, 0);
2922 else
2923 target = convert_to_mode (target_mode, ops[0].value, 0);
2925 return target;
2928 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2929 bytes from constant string DATA + OFFSET and return it as target
2930 constant. */
2932 static rtx
2933 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2934 scalar_int_mode mode)
2936 const char *str = (const char *) data;
2938 gcc_assert (offset >= 0
2939 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2940 <= strlen (str) + 1));
2942 return c_readstr (str + offset, mode);
2945 /* LEN specifies the length of the block for a memcpy/memset operation.
2946 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2947 In some cases we can make a very likely guess at the max size, in
2948 which case we put it into PROBABLE_MAX_SIZE. */
2950 static void
2951 determine_block_size (tree len, rtx len_rtx,
2952 unsigned HOST_WIDE_INT *min_size,
2953 unsigned HOST_WIDE_INT *max_size,
2954 unsigned HOST_WIDE_INT *probable_max_size)
2956 if (CONST_INT_P (len_rtx))
2958 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2959 return;
2961 else
2963 wide_int min, max;
2964 enum value_range_type range_type = VR_UNDEFINED;
2966 /* Determine bounds from the type. */
2967 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2968 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2969 else
2970 *min_size = 0;
2971 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2972 *probable_max_size = *max_size
2973 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2974 else
2975 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2977 if (TREE_CODE (len) == SSA_NAME)
2978 range_type = get_range_info (len, &min, &max);
2979 if (range_type == VR_RANGE)
2981 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2982 *min_size = min.to_uhwi ();
2983 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2984 *probable_max_size = *max_size = max.to_uhwi ();
2986 else if (range_type == VR_ANTI_RANGE)
2989 /* An anti range 0...N lets us determine the minimal size to be N+1. */
2989 if (min == 0)
2991 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2992 *min_size = max.to_uhwi () + 1;
2994 /* Code like
2996 int n;
2997 if (n < 100)
2998 memcpy (a, b, n)
3000 produces an anti range allowing negative values of N. We can
3001 still use this information to guess that N is not negative.
3003 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3004 *probable_max_size = min.to_uhwi () - 1;
3007 gcc_checking_assert (*max_size <=
3008 (unsigned HOST_WIDE_INT)
3009 GET_MODE_MASK (GET_MODE (len_rtx)));
3012 /* Try to verify that the sizes and lengths of the arguments to a string
3013 manipulation function given by EXP are within valid bounds and that
3014 the operation does not lead to buffer overflow or read past the end.
3015 Arguments other than EXP may be null. When non-null, the arguments
3016 have the following meaning:
3017 DST is the destination of a copy call or NULL otherwise.
3018 SRC is the source of a copy call or NULL otherwise.
3019 DSTWRITE is the number of bytes written into the destination obtained
3020 from the user-supplied size argument to the function (such as in
3021 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3022 MAXREAD is the user-supplied bound on the length of the source sequence
3023 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3024 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3025 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3026 expression EXP is a string function call (as opposed to a memory call
3027 like memcpy). As an exception, SRCSTR can also be an integer denoting
3028 the precomputed size of the source string or object (for functions like
3029 memcpy).
3030 DSTSIZE is the size of the destination object specified by the last
3031 argument to the _chk builtins, typically resulting from the expansion
3032 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3033 DSTSIZE).
3035 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3036 SIZE_MAX.
3038 If the call is successfully verified as safe return true, otherwise
3039 return false. */
3041 static bool
3042 check_access (tree exp, tree, tree, tree dstwrite,
3043 tree maxread, tree srcstr, tree dstsize)
3045 int opt = OPT_Wstringop_overflow_;
3047 /* The size of the largest object is half the address space, or
3048 PTRDIFF_MAX. (This is way too permissive.) */
3049 tree maxobjsize = max_object_size ();
3051 /* Either the length of the source string for string functions or
3052 the size of the source object for raw memory functions. */
3053 tree slen = NULL_TREE;
3055 tree range[2] = { NULL_TREE, NULL_TREE };
3057 /* Set to true when the exact number of bytes written by a string
3058 function like strcpy is not known and the only thing that is
3059 known is that it must be at least one (for the terminating nul). */
3060 bool at_least_one = false;
3061 if (srcstr)
3063 /* SRCSTR is normally a pointer to a string but as a special case
3064 it can be an integer denoting the length of a string. */
3065 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3067 /* Try to determine the range of lengths the source string
3068 refers to. If it can be determined and is less than
3069 the upper bound given by MAXREAD add one to it for
3070 the terminating nul. Otherwise, set it to one for
3071 the same reason, or to MAXREAD as appropriate. */
3072 get_range_strlen (srcstr, range);
3073 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3075 if (maxread && tree_int_cst_le (maxread, range[0]))
3076 range[0] = range[1] = maxread;
3077 else
3078 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3079 range[0], size_one_node);
3081 if (maxread && tree_int_cst_le (maxread, range[1]))
3082 range[1] = maxread;
3083 else if (!integer_all_onesp (range[1]))
3084 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3085 range[1], size_one_node);
3087 slen = range[0];
3089 else
3091 at_least_one = true;
3092 slen = size_one_node;
3095 else
3096 slen = srcstr;
3099 if (!dstwrite && !maxread)
3101 /* When the only available piece of data is the object size
3102 there is nothing to do. */
3103 if (!slen)
3104 return true;
3106 /* Otherwise, when the length of the source sequence is known
3107 (as with strlen), set DSTWRITE to it. */
3108 if (!range[0])
3109 dstwrite = slen;
3112 if (!dstsize)
3113 dstsize = maxobjsize;
3115 if (dstwrite)
3116 get_size_range (dstwrite, range);
3118 tree func = get_callee_fndecl (exp);
3120 /* First check the number of bytes to be written against the maximum
3121 object size. */
3122 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3124 location_t loc = tree_nonartificial_location (exp);
3125 loc = expansion_point_location_if_in_system_header (loc);
3127 if (range[0] == range[1])
3128 warning_at (loc, opt,
3129 "%K%qD specified size %E "
3130 "exceeds maximum object size %E",
3131 exp, func, range[0], maxobjsize);
3132 else
3133 warning_at (loc, opt,
3134 "%K%qD specified size between %E and %E "
3135 "exceeds maximum object size %E",
3136 exp, func,
3137 range[0], range[1], maxobjsize);
3138 return false;
3141 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3142 constant, and in range of unsigned HOST_WIDE_INT. */
3143 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3145 /* Next check the number of bytes to be written against the destination
3146 object size. */
3147 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3149 if (range[0]
3150 && ((tree_fits_uhwi_p (dstsize)
3151 && tree_int_cst_lt (dstsize, range[0]))
3152 || (tree_fits_uhwi_p (dstwrite)
3153 && tree_int_cst_lt (dstwrite, range[0]))))
3155 location_t loc = tree_nonartificial_location (exp);
3156 loc = expansion_point_location_if_in_system_header (loc);
3158 if (dstwrite == slen && at_least_one)
3160 /* This is a call to strcpy with a destination of 0 size
3161 and a source of unknown length. The call will write
3162 at least one byte past the end of the destination. */
3163 warning_at (loc, opt,
3164 "%K%qD writing %E or more bytes into a region "
3165 "of size %E overflows the destination",
3166 exp, func, range[0], dstsize);
3168 else if (tree_int_cst_equal (range[0], range[1]))
3169 warning_at (loc, opt,
3170 (integer_onep (range[0])
3171 ? G_("%K%qD writing %E byte into a region "
3172 "of size %E overflows the destination")
3173 : G_("%K%qD writing %E bytes into a region "
3174 "of size %E overflows the destination")),
3175 exp, func, range[0], dstsize);
3176 else if (tree_int_cst_sign_bit (range[1]))
3178 /* Avoid printing the upper bound if it's invalid. */
3179 warning_at (loc, opt,
3180 "%K%qD writing %E or more bytes into a region "
3181 "of size %E overflows the destination",
3182 exp, func, range[0], dstsize);
3184 else
3185 warning_at (loc, opt,
3186 "%K%qD writing between %E and %E bytes into "
3187 "a region of size %E overflows the destination",
3188 exp, func, range[0], range[1],
3189 dstsize);
3191 /* Return error when an overflow has been detected. */
3192 return false;
3196 /* Check the maximum length of the source sequence against the size
3197 of the destination object if known, or against the maximum size
3198 of an object. */
3199 if (maxread)
3201 get_size_range (maxread, range);
3203 /* Use the lower end for MAXREAD from now on. */
3204 if (range[0])
3205 maxread = range[0];
3207 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3209 location_t loc = tree_nonartificial_location (exp);
3210 loc = expansion_point_location_if_in_system_header (loc);
3212 if (tree_int_cst_lt (maxobjsize, range[0]))
3214 /* Warn about crazy big sizes first since that's more
3215 likely to be meaningful than saying that the bound
3216 is greater than the object size if both are big. */
3217 if (range[0] == range[1])
3218 warning_at (loc, opt,
3219 "%K%qD specified bound %E "
3220 "exceeds maximum object size %E",
3221 exp, func,
3222 range[0], maxobjsize);
3223 else
3224 warning_at (loc, opt,
3225 "%K%qD specified bound between %E and %E "
3226 "exceeds maximum object size %E",
3227 exp, func,
3228 range[0], range[1], maxobjsize);
3230 return false;
3233 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3235 if (tree_int_cst_equal (range[0], range[1]))
3236 warning_at (loc, opt,
3237 "%K%qD specified bound %E "
3238 "exceeds destination size %E",
3239 exp, func,
3240 range[0], dstsize);
3241 else
3242 warning_at (loc, opt,
3243 "%K%qD specified bound between %E and %E "
3244 "exceeds destination size %E",
3245 exp, func,
3246 range[0], range[1], dstsize);
3247 return false;
3252 /* Check for reading past the end of SRC. */
3253 if (slen
3254 && slen == srcstr
3255 && dstwrite && range[0]
3256 && tree_int_cst_lt (slen, range[0]))
3258 location_t loc = tree_nonartificial_location (exp);
3260 if (tree_int_cst_equal (range[0], range[1]))
3261 warning_at (loc, opt,
3262 (tree_int_cst_equal (range[0], integer_one_node)
3263 ? G_("%K%qD reading %E byte from a region of size %E")
3264 : G_("%K%qD reading %E bytes from a region of size %E")),
3265 exp, func, range[0], slen);
3266 else if (tree_int_cst_sign_bit (range[1]))
3268 /* Avoid printing the upper bound if it's invalid. */
3269 warning_at (loc, opt,
3270 "%K%qD reading %E or more bytes from a region "
3271 "of size %E",
3272 exp, func, range[0], slen);
3274 else
3275 warning_at (loc, opt,
3276 "%K%qD reading between %E and %E bytes from a region "
3277 "of size %E",
3278 exp, func, range[0], range[1], slen);
3279 return false;
3282 return true;
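/* As an example of the destination overflow diagnostics above:

     char d[3];
     strcpy (d, "abcd");

   gives RANGE[0] == RANGE[1] == 5 (four characters plus the
   terminating nul) and DSTSIZE == 3, triggering the "writing 5 bytes
   into a region of size 3 overflows the destination" warning.  */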
3285 /* Helper to compute the size of the object referenced by the DEST
3286 expression which must have pointer type, using Object Size type
3287 OSTYPE (only the least significant 2 bits are used). Return
3288 an estimate of the size of the object if successful or NULL when
3289 the size cannot be determined. When the referenced object involves
3290 a non-constant offset in some range the returned value represents
3291 the largest size given the smallest non-negative offset in the
3292 range. The function is intended for diagnostics and should not
3293 be used to influence code generation or optimization. */
3295 tree
3296 compute_objsize (tree dest, int ostype)
3298 unsigned HOST_WIDE_INT size;
3300 /* Only the two least significant bits are meaningful. */
3301 ostype &= 3;
3303 if (compute_builtin_object_size (dest, ostype, &size))
3304 return build_int_cst (sizetype, size);
3306 if (TREE_CODE (dest) == SSA_NAME)
3308 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3309 if (!is_gimple_assign (stmt))
3310 return NULL_TREE;
3312 dest = gimple_assign_rhs1 (stmt);
3314 tree_code code = gimple_assign_rhs_code (stmt);
3315 if (code == POINTER_PLUS_EXPR)
3317 /* compute_builtin_object_size fails for addresses with
3318 non-constant offsets. Try to determine the range of
3319 such an offset here and use it to adjust the constant
3320 size. */
3321 tree off = gimple_assign_rhs2 (stmt);
3322 if (TREE_CODE (off) == SSA_NAME
3323 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3325 wide_int min, max;
3326 enum value_range_type rng = get_range_info (off, &min, &max);
3328 if (rng == VR_RANGE)
3330 if (tree size = compute_objsize (dest, ostype))
3332 wide_int wisiz = wi::to_wide (size);
3334 /* Ignore negative offsets for now. For others,
3335 use the lower bound as the most optimistic
3336 estimate of the (remaining) size. */
3337 if (wi::sign_mask (min))
3339 else if (wi::ltu_p (min, wisiz))
3340 return wide_int_to_tree (TREE_TYPE (size),
3341 wi::sub (wisiz, min));
3342 else
3343 return size_zero_node;
3348 else if (code != ADDR_EXPR)
3349 return NULL_TREE;
3352 /* Unless computing the largest size (for memcpy and other raw memory
3353 functions), try to determine the size of the object from its type. */
3354 if (!ostype)
3355 return NULL_TREE;
3357 if (TREE_CODE (dest) != ADDR_EXPR)
3358 return NULL_TREE;
3360 tree type = TREE_TYPE (dest);
3361 if (TREE_CODE (type) == POINTER_TYPE)
3362 type = TREE_TYPE (type);
3364 type = TYPE_MAIN_VARIANT (type);
3366 if (TREE_CODE (type) == ARRAY_TYPE
3367 && !array_at_struct_end_p (dest))
3369 /* Return the constant size unless it's zero (that's a zero-length
3370 array likely at the end of a struct). */
3371 tree size = TYPE_SIZE_UNIT (type);
3372 if (size && TREE_CODE (size) == INTEGER_CST
3373 && !integer_zerop (size))
3374 return size;
3377 return NULL_TREE;
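/* Illustrative sketch: given

     char a[8];
     char *p = a + i;

   where range info says I is in [2, 4], compute_builtin_object_size
   fails because of the variable offset, but the POINTER_PLUS_EXPR path
   above recurses on &a (size 8) and subtracts the smallest offset in
   the range, so compute_objsize (p, 1) returns 6.  */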
3380 /* Helper to determine and check the sizes of the source and the destination
3381 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3382 call expression, DEST is the destination argument, SRC is the source
3383 argument or null, and LEN is the number of bytes. Use Object Size type-0
3384 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3385 (no overflow or invalid sizes), false otherwise. */
3387 static bool
3388 check_memop_access (tree exp, tree dest, tree src, tree size)
3390 /* For functions like memset and memcpy that operate on raw memory
3391 try to determine the size of the largest source and destination
3392 object using type-0 Object Size regardless of the object size
3393 type specified by the option. */
3394 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3395 tree dstsize = compute_objsize (dest, 0);
3397 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3398 srcsize, dstsize);
3401 /* Validate memchr arguments without performing any expansion.
3402 Return NULL_RTX. */
3404 static rtx
3405 expand_builtin_memchr (tree exp, rtx)
3407 if (!validate_arglist (exp,
3408 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3409 return NULL_RTX;
3411 tree arg1 = CALL_EXPR_ARG (exp, 0);
3412 tree len = CALL_EXPR_ARG (exp, 2);
3414 /* Diagnose calls where the specified length exceeds the size
3415 of the object. */
3416 if (warn_stringop_overflow)
3418 tree size = compute_objsize (arg1, 0);
3419 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3420 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3423 return NULL_RTX;
3426 /* Expand a call EXP to the memcpy builtin.
3427 Return NULL_RTX if we failed; the caller should emit a normal call,
3428 otherwise try to get the result in TARGET, if convenient (and in
3429 mode MODE if that's convenient). */
3431 static rtx
3432 expand_builtin_memcpy (tree exp, rtx target)
3434 if (!validate_arglist (exp,
3435 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3436 return NULL_RTX;
3438 tree dest = CALL_EXPR_ARG (exp, 0);
3439 tree src = CALL_EXPR_ARG (exp, 1);
3440 tree len = CALL_EXPR_ARG (exp, 2);
3442 check_memop_access (exp, dest, src, len);
3444 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3445 /*endp=*/ 0);
3448 /* Check a call EXP to the memmove built-in for validity.
3449 Return NULL_RTX on both success and failure. */
3451 static rtx
3452 expand_builtin_memmove (tree exp, rtx)
3454 if (!validate_arglist (exp,
3455 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3456 return NULL_RTX;
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 tree len = CALL_EXPR_ARG (exp, 2);
3462 check_memop_access (exp, dest, src, len);
3464 return NULL_RTX;
3467 /* Expand an instrumented call EXP to the memcpy builtin.
3468 Return NULL_RTX if we failed; the caller should emit a normal call,
3469 otherwise try to get the result in TARGET, if convenient (and in
3470 mode MODE if that's convenient). */
3472 static rtx
3473 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3475 if (!validate_arglist (exp,
3476 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3477 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3478 INTEGER_TYPE, VOID_TYPE))
3479 return NULL_RTX;
3480 else
3482 tree dest = CALL_EXPR_ARG (exp, 0);
3483 tree src = CALL_EXPR_ARG (exp, 2);
3484 tree len = CALL_EXPR_ARG (exp, 4);
3485 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3486 /*endp=*/ 0);
3488 /* Return src bounds with the result. */
3489 if (res)
3491 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3492 expand_normal (CALL_EXPR_ARG (exp, 1)));
3493 res = chkp_join_splitted_slot (res, bnd);
3495 return res;
3499 /* Expand a call EXP to the mempcpy builtin.
3500 Return NULL_RTX if we failed; the caller should emit a normal call,
3501 otherwise try to get the result in TARGET, if convenient (and in
3502 mode MODE if that's convenient). If ENDP is 0 return the
3503 destination pointer, if ENDP is 1 return the end pointer ala
3504 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3505 stpcpy. */
3507 static rtx
3508 expand_builtin_mempcpy (tree exp, rtx target)
3510 if (!validate_arglist (exp,
3511 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3512 return NULL_RTX;
3514 tree dest = CALL_EXPR_ARG (exp, 0);
3515 tree src = CALL_EXPR_ARG (exp, 1);
3516 tree len = CALL_EXPR_ARG (exp, 2);
3518 /* Policy does not generally allow using compute_objsize (which
3519 is used internally by check_memop_access) to change code generation
3520 or drive optimization decisions.
3522 In this instance it is safe because the code we generate has
3523 the same semantics regardless of the return value of
3524 check_memop_access.  Exactly the same amount of data is copied
3525 and the return value is exactly the same in both cases.
3527 Furthermore, check_memop_access always uses mode 0 for the call to
3528 compute_objsize, so the imprecise nature of compute_objsize is
3529 avoided. */
3531 /* Avoid expanding mempcpy into memcpy when the call is determined
3532 to overflow the buffer. This also prevents the same overflow
3533 from being diagnosed again when expanding memcpy. */
3534 if (!check_memop_access (exp, dest, src, len))
3535 return NULL_RTX;
3537 return expand_builtin_mempcpy_args (dest, src, len,
3538 target, exp, /*endp=*/ 1);
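/* The ENDP convention shared by the copy expanders can be read off at
   the source level (sketch only; values follow the comment above):

     char d[16], *p;
     p = memcpy  (d, "abc", 4);   // ENDP == 0: returns D
     p = mempcpy (d, "abc", 4);   // ENDP == 1: returns D + 4, the end
     p = stpcpy  (d, "abc");      // ENDP == 2: returns D + 3, the end
                                  // minus one, i.e. the copied NUL
*/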
3541 /* Expand an instrumented call EXP to the mempcpy builtin.
3542 Return NULL_RTX if we failed; the caller should emit a normal call,
3543 otherwise try to get the result in TARGET, if convenient (and in
3544 mode MODE if that's convenient). */
3546 static rtx
3547 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3549 if (!validate_arglist (exp,
3550 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3551 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3552 INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3554 else
3556 tree dest = CALL_EXPR_ARG (exp, 0);
3557 tree src = CALL_EXPR_ARG (exp, 2);
3558 tree len = CALL_EXPR_ARG (exp, 4);
3559 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3560 exp, 1);
3562 /* Return src bounds with the result. */
3563 if (res)
3565 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3566 expand_normal (CALL_EXPR_ARG (exp, 1)));
3567 res = chkp_join_splitted_slot (res, bnd);
3569 return res;
3573 /* Helper function to do the actual work of expanding the memory copy
3574 family of functions (memcpy, mempcpy, stpcpy).  Expansion should assign LEN bytes
3575 of memory from SRC to DEST and assign to TARGET if convenient.
3576 If ENDP is 0 return the
3577 destination pointer, if ENDP is 1 return the end pointer ala
3578 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3579 stpcpy. */
3581 static rtx
3582 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3583 rtx target, tree exp, int endp)
3585 const char *src_str;
3586 unsigned int src_align = get_pointer_alignment (src);
3587 unsigned int dest_align = get_pointer_alignment (dest);
3588 rtx dest_mem, src_mem, dest_addr, len_rtx;
3589 HOST_WIDE_INT expected_size = -1;
3590 unsigned int expected_align = 0;
3591 unsigned HOST_WIDE_INT min_size;
3592 unsigned HOST_WIDE_INT max_size;
3593 unsigned HOST_WIDE_INT probable_max_size;
3595 /* If DEST is not a pointer type, call the normal function. */
3596 if (dest_align == 0)
3597 return NULL_RTX;
3599 /* If SRC is not a pointer type, don't do this
3600 operation in-line. */
3601 if (src_align == 0)
3602 return NULL_RTX;
3604 if (currently_expanding_gimple_stmt)
3605 stringop_block_profile (currently_expanding_gimple_stmt,
3606 &expected_align, &expected_size);
3608 if (expected_align < dest_align)
3609 expected_align = dest_align;
3610 dest_mem = get_memory_rtx (dest, len);
3611 set_mem_align (dest_mem, dest_align);
3612 len_rtx = expand_normal (len);
3613 determine_block_size (len, len_rtx, &min_size, &max_size,
3614 &probable_max_size);
3615 src_str = c_getstr (src);
3617 /* If SRC is a string constant and block move would be done
3618 by pieces, we can avoid loading the string from memory
3619 and only store the computed constants. */
3620 if (src_str
3621 && CONST_INT_P (len_rtx)
3622 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3623 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3624 CONST_CAST (char *, src_str),
3625 dest_align, false))
3627 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3628 builtin_memcpy_read_str,
3629 CONST_CAST (char *, src_str),
3630 dest_align, false, endp);
3631 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3632 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3633 return dest_mem;
3636 src_mem = get_memory_rtx (src, len);
3637 set_mem_align (src_mem, src_align);
3639 /* Copy word part most expediently. */
3640 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3641 CALL_EXPR_TAILCALL (exp)
3642 && (endp == 0 || target == const0_rtx)
3643 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3644 expected_align, expected_size,
3645 min_size, max_size, probable_max_size);
3647 if (dest_addr == 0)
3649 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3650 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3653 if (endp && target != const0_rtx)
3655 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3656 /* stpcpy returns a pointer to the last byte. */
3657 if (endp == 2)
3658 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3661 return dest_addr;
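/* For a constant source string and a small constant length, the
   store_by_pieces path above replaces the copy with direct constant
   stores.  A rough, target-dependent sketch:

     char d[8];
     __builtin_memcpy (d, "hi", 3);
     // may expand to immediate stores of 'h', 'i', '\0' into D,
     // with no load from the string literal at run time
*/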
3664 static rtx
3665 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3666 rtx target, tree orig_exp, int endp)
3668 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3669 endp);
3672 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3673 we failed; the caller should emit a normal call, otherwise try to
3674 get the result in TARGET, if convenient. If ENDP is 0 return the
3675 destination pointer, if ENDP is 1 return the end pointer ala
3676 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3677 stpcpy. */
3679 static rtx
3680 expand_movstr (tree dest, tree src, rtx target, int endp)
3682 struct expand_operand ops[3];
3683 rtx dest_mem;
3684 rtx src_mem;
3686 if (!targetm.have_movstr ())
3687 return NULL_RTX;
3689 dest_mem = get_memory_rtx (dest, NULL);
3690 src_mem = get_memory_rtx (src, NULL);
3691 if (!endp)
3693 target = force_reg (Pmode, XEXP (dest_mem, 0));
3694 dest_mem = replace_equiv_address (dest_mem, target);
3697 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3698 create_fixed_operand (&ops[1], dest_mem);
3699 create_fixed_operand (&ops[2], src_mem);
3700 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3701 return NULL_RTX;
3703 if (endp && target != const0_rtx)
3705 target = ops[0].value;
3706 /* movstr is supposed to set end to the address of the NUL
3707 terminator. If the caller requested a mempcpy-like return value,
3708 adjust it. */
3709 if (endp == 1)
3711 rtx tem = plus_constant (GET_MODE (target),
3712 gen_lowpart (GET_MODE (target), target), 1);
3713 emit_move_insn (target, force_operand (tem, NULL_RTX));
3716 return target;
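/* movstr leaves its output operand pointing at the copied NUL, so a
   mempcpy-style result (ENDP == 1) needs the +1 fixup applied above.
   Sketch of the two conventions:

     char d[8];
     char *q = stpcpy (d, "abc");   // ENDP == 2: q == d + 3, the NUL
     // ENDP == 1 wants d + 4, one past the NUL, hence the adjustment.
*/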
3719 /* Do some very basic size validation of a call to the strcat builtin
3720 given by EXP. Return NULL_RTX to have the built-in expand to a call
3721 to the library function. */
3723 static rtx
3724 expand_builtin_strcat (tree exp, rtx)
3726 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3727 || !warn_stringop_overflow)
3728 return NULL_RTX;
3730 tree dest = CALL_EXPR_ARG (exp, 0);
3731 tree src = CALL_EXPR_ARG (exp, 1);
3733 /* There is no way here to determine the length of the string in
3734 the destination to which the SRC string is being appended, so
3735 just diagnose cases where the source string is longer than
3736 the destination object. */
3738 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3740 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3741 destsize);
3743 return NULL_RTX;
3746 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3747 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3748 try to get the result in TARGET, if convenient (and in mode MODE if that's
3749 convenient). */
3751 static rtx
3752 expand_builtin_strcpy (tree exp, rtx target)
3754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3755 return NULL_RTX;
3757 tree dest = CALL_EXPR_ARG (exp, 0);
3758 tree src = CALL_EXPR_ARG (exp, 1);
3760 if (warn_stringop_overflow)
3762 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3763 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3764 src, destsize);
3767 return expand_builtin_strcpy_args (dest, src, target);
3770 /* Helper function to do the actual work for expand_builtin_strcpy. The
3771 arguments to the builtin_strcpy call DEST and SRC are broken out
3772 so that this can also be called without constructing an actual CALL_EXPR.
3773 The other arguments and return value are the same as for
3774 expand_builtin_strcpy. */
3776 static rtx
3777 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3779 return expand_movstr (dest, src, target, /*endp=*/0);
3782 /* Expand a call EXP to the stpcpy builtin.
3783 Return NULL_RTX if we failed; the caller should emit a normal call,
3784 otherwise try to get the result in TARGET, if convenient (and in
3785 mode MODE if that's convenient). */
3787 static rtx
3788 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3790 tree dst, src;
3791 location_t loc = EXPR_LOCATION (exp);
3793 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3794 return NULL_RTX;
3796 dst = CALL_EXPR_ARG (exp, 0);
3797 src = CALL_EXPR_ARG (exp, 1);
3799 if (warn_stringop_overflow)
3801 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3802 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3803 src, destsize);
3806 /* If return value is ignored, transform stpcpy into strcpy. */
3807 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3809 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3810 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3811 return expand_expr (result, target, mode, EXPAND_NORMAL);
3813 else
3815 tree len, lenp1;
3816 rtx ret;
3818 /* Ensure we get an actual string whose length can be evaluated at
3819 compile-time, not an expression containing a string. This is
3820 because the latter will potentially produce pessimized code
3821 when used to produce the return value. */
3822 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3823 return expand_movstr (dst, src, target, /*endp=*/2);
3825 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3826 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3827 target, exp, /*endp=*/2);
3829 if (ret)
3830 return ret;
3832 if (TREE_CODE (len) == INTEGER_CST)
3834 rtx len_rtx = expand_normal (len);
3836 if (CONST_INT_P (len_rtx))
3838 ret = expand_builtin_strcpy_args (dst, src, target);
3840 if (ret)
3842 if (! target)
3844 if (mode != VOIDmode)
3845 target = gen_reg_rtx (mode);
3846 else
3847 target = gen_reg_rtx (GET_MODE (ret));
3849 if (GET_MODE (target) != GET_MODE (ret))
3850 ret = gen_lowpart (GET_MODE (target), ret);
3852 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3853 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3854 gcc_assert (ret);
3856 return target;
3861 return expand_movstr (dst, src, target, /*endp=*/2);
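/* When the result of stpcpy is unused, the transformation above degrades
   it to strcpy, since the end pointer need not be computed.  Sketch:

     (void) stpcpy (d, s);   // expanded as strcpy (d, s)
     p = stpcpy (d, s);      // must keep stpcpy semantics:
                             // p == d + strlen (s)
*/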
3865 /* Check a call EXP to the stpncpy built-in for validity.
3866 Return NULL_RTX on both success and failure. */
3868 static rtx
3869 expand_builtin_stpncpy (tree exp, rtx)
3871 if (!validate_arglist (exp,
3872 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3873 || !warn_stringop_overflow)
3874 return NULL_RTX;
3876 /* The source and destination of the call. */
3877 tree dest = CALL_EXPR_ARG (exp, 0);
3878 tree src = CALL_EXPR_ARG (exp, 1);
3880 /* The exact number of bytes to write (not the maximum). */
3881 tree len = CALL_EXPR_ARG (exp, 2);
3883 /* The size of the destination object. */
3884 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3886 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3888 return NULL_RTX;
3891 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3892 bytes from constant string DATA + OFFSET and return it as target
3893 constant. */
3896 rtx builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3897 scalar_int_mode mode)
3899 const char *str = (const char *) data;
3901 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3902 return const0_rtx;
3904 return c_readstr (str + offset, mode);
3907 /* Helper to check the sizes of sequences and the destination of calls
3908 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3909 success (no overflow or invalid sizes), false otherwise. */
3911 static bool
3912 check_strncat_sizes (tree exp, tree objsize)
3914 tree dest = CALL_EXPR_ARG (exp, 0);
3915 tree src = CALL_EXPR_ARG (exp, 1);
3916 tree maxread = CALL_EXPR_ARG (exp, 2);
3918 /* Try to determine the range of lengths that the source expression
3919 refers to. */
3920 tree lenrange[2];
3921 get_range_strlen (src, lenrange);
3923 /* Try to verify that the destination is big enough for the shortest
3924 string. */
3926 if (!objsize && warn_stringop_overflow)
3928 /* If it hasn't been provided by __strncat_chk, try to determine
3929 the size of the destination object into which the source is
3930 being copied. */
3931 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3934 /* Add one for the terminating nul. */
3935 tree srclen = (lenrange[0]
3936 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3937 size_one_node)
3938 : NULL_TREE);
3940 /* The strncat function copies at most MAXREAD bytes and always appends
3941 the terminating nul so the specified upper bound should never be equal
3942 to (or greater than) the size of the destination. */
3943 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3944 && tree_int_cst_equal (objsize, maxread))
3946 location_t loc = tree_nonartificial_location (exp);
3947 loc = expansion_point_location_if_in_system_header (loc);
3949 warning_at (loc, OPT_Wstringop_overflow_,
3950 "%K%qD specified bound %E equals destination size",
3951 exp, get_callee_fndecl (exp), maxread);
3953 return false;
3956 if (!srclen
3957 || (maxread && tree_fits_uhwi_p (maxread)
3958 && tree_fits_uhwi_p (srclen)
3959 && tree_int_cst_lt (maxread, srclen)))
3960 srclen = maxread;
3962 /* The number of bytes to write is LEN but check_access will also
3963 check SRCLEN if LEN's value isn't known. */
3964 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3965 objsize);
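/* The "bound equals destination size" diagnostic above catches a common
   misuse of strncat, whose bound must leave room for the terminating
   nul.  Sketch (buffer name illustrative):

     char d[8] = "";
     strncat (d, s, sizeof d);                     // diagnosed: bound 8
                                                   // equals destination size
     strncat (d, s, sizeof d - strlen (d) - 1);    // correct idiom
*/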
3968 /* Similar to expand_builtin_strcat, do some very basic size validation
3969 of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
3970 the built-in expand to a call to the library function. */
3972 static rtx
3973 expand_builtin_strncat (tree exp, rtx)
3975 if (!validate_arglist (exp,
3976 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3977 || !warn_stringop_overflow)
3978 return NULL_RTX;
3980 tree dest = CALL_EXPR_ARG (exp, 0);
3981 tree src = CALL_EXPR_ARG (exp, 1);
3982 /* The upper bound on the number of bytes to write. */
3983 tree maxread = CALL_EXPR_ARG (exp, 2);
3984 /* The length of the source sequence. */
3985 tree slen = c_strlen (src, 1);
3987 /* Try to determine the range of lengths that the source expression
3988 refers to. */
3989 tree lenrange[2];
3990 if (slen)
3991 lenrange[0] = lenrange[1] = slen;
3992 else
3993 get_range_strlen (src, lenrange);
3995 /* Try to verify that the destination is big enough for the shortest
3996 string. First try to determine the size of the destination object
3997 into which the source is being copied. */
3998 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4000 /* Add one for the terminating nul. */
4001 tree srclen = (lenrange[0]
4002 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4003 size_one_node)
4004 : NULL_TREE);
4006 /* The strncat function copies at most MAXREAD bytes and always appends
4007 the terminating nul so the specified upper bound should never be equal
4008 to (or greater than) the size of the destination. */
4009 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4010 && tree_int_cst_equal (destsize, maxread))
4012 location_t loc = tree_nonartificial_location (exp);
4013 loc = expansion_point_location_if_in_system_header (loc);
4015 warning_at (loc, OPT_Wstringop_overflow_,
4016 "%K%qD specified bound %E equals destination size",
4017 exp, get_callee_fndecl (exp), maxread);
4019 return NULL_RTX;
4022 if (!srclen
4023 || (maxread && tree_fits_uhwi_p (maxread)
4024 && tree_fits_uhwi_p (srclen)
4025 && tree_int_cst_lt (maxread, srclen)))
4026 srclen = maxread;
4028 /* The number of bytes to write is SRCLEN. */
4029 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4031 return NULL_RTX;
4034 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4035 NULL_RTX if we failed; the caller should emit a normal call. */
4037 static rtx
4038 expand_builtin_strncpy (tree exp, rtx target)
4040 location_t loc = EXPR_LOCATION (exp);
4042 if (validate_arglist (exp,
4043 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4045 tree dest = CALL_EXPR_ARG (exp, 0);
4046 tree src = CALL_EXPR_ARG (exp, 1);
4047 /* The number of bytes to write (not the maximum). */
4048 tree len = CALL_EXPR_ARG (exp, 2);
4049 /* The length of the source sequence. */
4050 tree slen = c_strlen (src, 1);
4052 if (warn_stringop_overflow)
4054 tree destsize = compute_objsize (dest,
4055 warn_stringop_overflow - 1);
4057 /* The number of bytes to write is LEN but check_access will also
4058 check SLEN if LEN's value isn't known. */
4059 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4060 destsize);
4063 /* We must be passed constant LEN and SRC parameters. */
4064 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4065 return NULL_RTX;
4067 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4069 /* We're required to pad with trailing zeros if the requested
4070 len is greater than strlen(s2)+1. In that case try to
4071 use store_by_pieces; if that fails, punt.  */
4072 if (tree_int_cst_lt (slen, len))
4074 unsigned int dest_align = get_pointer_alignment (dest);
4075 const char *p = c_getstr (src);
4076 rtx dest_mem;
4078 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4079 || !can_store_by_pieces (tree_to_uhwi (len),
4080 builtin_strncpy_read_str,
4081 CONST_CAST (char *, p),
4082 dest_align, false))
4083 return NULL_RTX;
4085 dest_mem = get_memory_rtx (dest, len);
4086 store_by_pieces (dest_mem, tree_to_uhwi (len),
4087 builtin_strncpy_read_str,
4088 CONST_CAST (char *, p), dest_align, false, 0);
4089 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4090 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4091 return dest_mem;
4094 return NULL_RTX;
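/* The padding branch above relies on strncpy's guarantee that when LEN
   exceeds strlen (SRC) the remainder of the destination is zero-filled,
   so the whole write can be emitted with store_by_pieces.  Sketch:

     char d[8];
     strncpy (d, "ab", 8);   // writes 'a', 'b', then six '\0' bytes;
                             // with constant operands this becomes
                             // direct stores rather than a call
*/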
4097 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
4098 bytes from constant string DATA + OFFSET and return it as target
4099 constant. */
4102 rtx builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4103 scalar_int_mode mode)
4105 const char *c = (const char *) data;
4106 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4108 memset (p, *c, GET_MODE_SIZE (mode));
4110 return c_readstr (p, mode);
4113 /* Callback routine for store_by_pieces. Return the RTL of a register
4114 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4115 char value given in the RTL register data. For example, if mode is
4116 4 bytes wide, return the RTL for 0x01010101*data. */
4118 static rtx
4119 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4120 scalar_int_mode mode)
4122 rtx target, coeff;
4123 size_t size;
4124 char *p;
4126 size = GET_MODE_SIZE (mode);
4127 if (size == 1)
4128 return (rtx) data;
4130 p = XALLOCAVEC (char, size);
4131 memset (p, 1, size);
4132 coeff = c_readstr (p, mode);
4134 target = convert_to_mode (mode, (rtx) data, 1);
4135 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4136 return force_reg (mode, target);
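/* The multiplication above replicates one byte across the whole mode:
   multiplying by a constant whose bytes are all 1 copies the byte into
   every position.  A minimal stand-alone sketch for a 4-byte mode
   (assumes a 32-bit unsigned int; not GCC code):

     unsigned int
     replicate_byte (unsigned char b)
     {
       return b * 0x01010101u;   // 0x2A -> 0x2A2A2A2A
     }
*/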
4139 /* Expand expression EXP, which is a call to the memset builtin. Return
4140 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4141 try to get the result in TARGET, if convenient (and in mode MODE if that's
4142 convenient). */
4144 static rtx
4145 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4147 if (!validate_arglist (exp,
4148 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4149 return NULL_RTX;
4151 tree dest = CALL_EXPR_ARG (exp, 0);
4152 tree val = CALL_EXPR_ARG (exp, 1);
4153 tree len = CALL_EXPR_ARG (exp, 2);
4155 check_memop_access (exp, dest, NULL_TREE, len);
4157 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4160 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4161 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4162 try to get the result in TARGET, if convenient (and in mode MODE if that's
4163 convenient). */
4165 static rtx
4166 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4168 if (!validate_arglist (exp,
4169 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4170 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4171 return NULL_RTX;
4172 else
4174 tree dest = CALL_EXPR_ARG (exp, 0);
4175 tree val = CALL_EXPR_ARG (exp, 2);
4176 tree len = CALL_EXPR_ARG (exp, 3);
4177 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4179 /* Return src bounds with the result. */
4180 if (res)
4182 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4183 expand_normal (CALL_EXPR_ARG (exp, 1)));
4184 res = chkp_join_splitted_slot (res, bnd);
4186 return res;
4190 /* Helper function to do the actual work for expand_builtin_memset. The
4191 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4192 so that this can also be called without constructing an actual CALL_EXPR.
4193 The other arguments and return value are the same as for
4194 expand_builtin_memset. */
4196 static rtx
4197 expand_builtin_memset_args (tree dest, tree val, tree len,
4198 rtx target, machine_mode mode, tree orig_exp)
4200 tree fndecl, fn;
4201 enum built_in_function fcode;
4202 machine_mode val_mode;
4203 char c;
4204 unsigned int dest_align;
4205 rtx dest_mem, dest_addr, len_rtx;
4206 HOST_WIDE_INT expected_size = -1;
4207 unsigned int expected_align = 0;
4208 unsigned HOST_WIDE_INT min_size;
4209 unsigned HOST_WIDE_INT max_size;
4210 unsigned HOST_WIDE_INT probable_max_size;
4212 dest_align = get_pointer_alignment (dest);
4214 /* If DEST is not a pointer type, don't do this operation in-line. */
4215 if (dest_align == 0)
4216 return NULL_RTX;
4218 if (currently_expanding_gimple_stmt)
4219 stringop_block_profile (currently_expanding_gimple_stmt,
4220 &expected_align, &expected_size);
4222 if (expected_align < dest_align)
4223 expected_align = dest_align;
4225 /* If the LEN parameter is zero, return DEST. */
4226 if (integer_zerop (len))
4228 /* Evaluate and ignore VAL in case it has side-effects. */
4229 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4230 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4233 /* Stabilize the arguments in case we fail. */
4234 dest = builtin_save_expr (dest);
4235 val = builtin_save_expr (val);
4236 len = builtin_save_expr (len);
4238 len_rtx = expand_normal (len);
4239 determine_block_size (len, len_rtx, &min_size, &max_size,
4240 &probable_max_size);
4241 dest_mem = get_memory_rtx (dest, len);
4242 val_mode = TYPE_MODE (unsigned_char_type_node);
4244 if (TREE_CODE (val) != INTEGER_CST)
4246 rtx val_rtx;
4248 val_rtx = expand_normal (val);
4249 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4251 /* Assume that we can memset by pieces if we can store
4252 the coefficients by pieces (in the required modes).
4253 We can't pass builtin_memset_gen_str as that emits RTL. */
4254 c = 1;
4255 if (tree_fits_uhwi_p (len)
4256 && can_store_by_pieces (tree_to_uhwi (len),
4257 builtin_memset_read_str, &c, dest_align,
4258 true))
4260 val_rtx = force_reg (val_mode, val_rtx);
4261 store_by_pieces (dest_mem, tree_to_uhwi (len),
4262 builtin_memset_gen_str, val_rtx, dest_align,
4263 true, 0);
4265 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4266 dest_align, expected_align,
4267 expected_size, min_size, max_size,
4268 probable_max_size))
4269 goto do_libcall;
4271 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4272 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4273 return dest_mem;
4276 if (target_char_cast (val, &c))
4277 goto do_libcall;
4279 if (c)
4281 if (tree_fits_uhwi_p (len)
4282 && can_store_by_pieces (tree_to_uhwi (len),
4283 builtin_memset_read_str, &c, dest_align,
4284 true))
4285 store_by_pieces (dest_mem, tree_to_uhwi (len),
4286 builtin_memset_read_str, &c, dest_align, true, 0);
4287 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4288 gen_int_mode (c, val_mode),
4289 dest_align, expected_align,
4290 expected_size, min_size, max_size,
4291 probable_max_size))
4292 goto do_libcall;
4294 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4295 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4296 return dest_mem;
4299 set_mem_align (dest_mem, dest_align);
4300 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4301 CALL_EXPR_TAILCALL (orig_exp)
4302 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4303 expected_align, expected_size,
4304 min_size, max_size,
4305 probable_max_size);
4307 if (dest_addr == 0)
4309 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4310 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4313 return dest_addr;
4315 do_libcall:
4316 fndecl = get_callee_fndecl (orig_exp);
4317 fcode = DECL_FUNCTION_CODE (fndecl);
4318 if (fcode == BUILT_IN_MEMSET
4319 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4320 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4321 dest, val, len);
4322 else if (fcode == BUILT_IN_BZERO)
4323 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4324 dest, len);
4325 else
4326 gcc_unreachable ();
4327 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4328 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4329 return expand_call (fn, target, target == const0_rtx);
4332 /* Expand expression EXP, which is a call to the bzero builtin. Return
4333 NULL_RTX if we failed; the caller should emit a normal call. */
4335 static rtx
4336 expand_builtin_bzero (tree exp)
4338 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4339 return NULL_RTX;
4341 tree dest = CALL_EXPR_ARG (exp, 0);
4342 tree size = CALL_EXPR_ARG (exp, 1);
4344 check_memop_access (exp, dest, NULL_TREE, size);
4346 /* New argument list transforming bzero(ptr x, int y) to
4347 memset(ptr x, int 0, size_t y). This is done this way
4348 so that if it isn't expanded inline, we fall back to
4349 calling bzero instead of memset. */
4351 location_t loc = EXPR_LOCATION (exp);
4353 return expand_builtin_memset_args (dest, integer_zero_node,
4354 fold_convert_loc (loc,
4355 size_type_node, size),
4356 const0_rtx, VOIDmode, exp);
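/* The argument rewrite above maps bzero onto the memset expander while
   keeping the original callee, so a failed inline expansion still ends
   up calling bzero rather than memset.  Sketch of the mapping:

     bzero (p, n);   // expanded as memset (p, 0, (size_t) n);
                     // on failure, falls back to a bzero library call
*/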
4359 /* Try to expand cmpstr operation ICODE with the given operands.
4360 Return the result rtx on success, otherwise return null. */
4362 static rtx
4363 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4364 HOST_WIDE_INT align)
4366 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4368 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4369 target = NULL_RTX;
4371 struct expand_operand ops[4];
4372 create_output_operand (&ops[0], target, insn_mode);
4373 create_fixed_operand (&ops[1], arg1_rtx);
4374 create_fixed_operand (&ops[2], arg2_rtx);
4375 create_integer_operand (&ops[3], align);
4376 if (maybe_expand_insn (icode, 4, ops))
4377 return ops[0].value;
4378 return NULL_RTX;
4381 /* Expand expression EXP, which is a call to the memcmp built-in function.
4382 Return NULL_RTX if we failed and the caller should emit a normal call,
4383 otherwise try to get the result in TARGET, if convenient.
4384 RESULT_EQ is true if we can relax the returned value to be either zero
4385 or nonzero, without caring about the sign. */
4387 static rtx
4388 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4390 if (!validate_arglist (exp,
4391 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4392 return NULL_RTX;
4394 tree arg1 = CALL_EXPR_ARG (exp, 0);
4395 tree arg2 = CALL_EXPR_ARG (exp, 1);
4396 tree len = CALL_EXPR_ARG (exp, 2);
4398 /* Diagnose calls where the specified length exceeds the size of either
4399 object. */
4400 if (warn_stringop_overflow)
4402 tree size = compute_objsize (arg1, 0);
4403 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4404 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4406 size = compute_objsize (arg2, 0);
4407 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4408 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4412 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4413 location_t loc = EXPR_LOCATION (exp);
4415 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4416 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4418 /* If we don't have POINTER_TYPE, call the function. */
4419 if (arg1_align == 0 || arg2_align == 0)
4420 return NULL_RTX;
4422 rtx arg1_rtx = get_memory_rtx (arg1, len);
4423 rtx arg2_rtx = get_memory_rtx (arg2, len);
4424 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4426 /* Set MEM_SIZE as appropriate. */
4427 if (CONST_INT_P (len_rtx))
4429 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4430 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4433 by_pieces_constfn constfn = NULL;
4435 const char *src_str = c_getstr (arg2);
4436 if (result_eq && src_str == NULL)
4438 src_str = c_getstr (arg1);
4439 if (src_str != NULL)
4440 std::swap (arg1_rtx, arg2_rtx);
4443 /* If SRC is a string constant and block move would be done
4444 by pieces, we can avoid loading the string from memory
4445 and only store the computed constants. */
4446 if (src_str
4447 && CONST_INT_P (len_rtx)
4448 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4449 constfn = builtin_memcpy_read_str;
4451 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4452 TREE_TYPE (len), target,
4453 result_eq, constfn,
4454 CONST_CAST (char *, src_str));
4456 if (result)
4458 /* Return the value in the proper mode for this function. */
4459 if (GET_MODE (result) == mode)
4460 return result;
4462 if (target != 0)
4464 convert_move (target, result, 0);
4465 return target;
4468 return convert_to_mode (mode, result, 0);
4471 return NULL_RTX;
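/* RESULT_EQ lets the block comparison use a cheaper "equal or not"
   expansion when only the zero-ness of the result matters.  Sketch:

     if (memcmp (a, b, n) == 0)   // RESULT_EQ: any nonzero value will do
       ...
     int c = memcmp (a, b, n);    // !RESULT_EQ: the sign of C matters
*/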
4474 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4475 if we failed; the caller should emit a normal call, otherwise try to get
4476 the result in TARGET, if convenient. */
4478 static rtx
4479 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4481 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4482 return NULL_RTX;
4484 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4485 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4486 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4487 return NULL_RTX;
4489 tree arg1 = CALL_EXPR_ARG (exp, 0);
4490 tree arg2 = CALL_EXPR_ARG (exp, 1);
4492 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4493 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4495 /* If we don't have POINTER_TYPE, call the function. */
4496 if (arg1_align == 0 || arg2_align == 0)
4497 return NULL_RTX;
4499 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4500 arg1 = builtin_save_expr (arg1);
4501 arg2 = builtin_save_expr (arg2);
4503 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4504 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4506 rtx result = NULL_RTX;
4507 /* Try to call cmpstrsi. */
4508 if (cmpstr_icode != CODE_FOR_nothing)
4509 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4510 MIN (arg1_align, arg2_align));
4512 /* Try to determine at least one length and call cmpstrnsi. */
4513 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4515 tree len;
4516 rtx arg3_rtx;
4518 tree len1 = c_strlen (arg1, 1);
4519 tree len2 = c_strlen (arg2, 1);
4521 if (len1)
4522 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4523 if (len2)
4524 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4526 /* If we don't have a constant length for the first, use the length
4527 of the second, if we know it. We don't require a constant for
4528 this case; some cost analysis could be done if both are available
4529 but neither is constant. For now, assume they're equally cheap,
4530 unless one has side effects. If both strings have constant lengths,
4531 use the smaller. */
4533 if (!len1)
4534 len = len2;
4535 else if (!len2)
4536 len = len1;
4537 else if (TREE_SIDE_EFFECTS (len1))
4538 len = len2;
4539 else if (TREE_SIDE_EFFECTS (len2))
4540 len = len1;
4541 else if (TREE_CODE (len1) != INTEGER_CST)
4542 len = len2;
4543 else if (TREE_CODE (len2) != INTEGER_CST)
4544 len = len1;
4545 else if (tree_int_cst_lt (len1, len2))
4546 len = len1;
4547 else
4548 len = len2;
4550 /* If both arguments have side effects, we cannot optimize. */
4551 if (len && !TREE_SIDE_EFFECTS (len))
4553 arg3_rtx = expand_normal (len);
4554 result = expand_cmpstrn_or_cmpmem
4555 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4556 arg3_rtx, MIN (arg1_align, arg2_align));
4560 /* Check to see if the argument was declared attribute nonstring
4561 and if so, issue a warning since at this point it's not known
4562 to be nul-terminated. */
4563 tree fndecl = get_callee_fndecl (exp);
4564 maybe_warn_nonstring_arg (fndecl, exp);
4566 if (result)
4568 /* Return the value in the proper mode for this function. */
4569 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4570 if (GET_MODE (result) == mode)
4571 return result;
4572 if (target == 0)
4573 return convert_to_mode (mode, result, 0);
4574 convert_move (target, result, 0);
4575 return target;
4578 /* Expand the library call ourselves using a stabilized argument
4579 list to avoid re-evaluating the function's arguments twice. */
4580 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4581 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4582 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4583 return expand_call (fn, target, target == const0_rtx);
4586 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4587 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4588 the result in TARGET, if convenient. */
4590 static rtx
4591 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4592 ATTRIBUTE_UNUSED machine_mode mode)
4594 if (!validate_arglist (exp,
4595 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4596 return NULL_RTX;
4598 /* If c_strlen can determine an expression for one of the string
4599 lengths, and it doesn't have side effects, then emit cmpstrnsi
4600 using length MIN(strlen(string)+1, arg3). */
4601 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4602 if (cmpstrn_icode == CODE_FOR_nothing)
4603 return NULL_RTX;
4605 tree len;
4607 tree arg1 = CALL_EXPR_ARG (exp, 0);
4608 tree arg2 = CALL_EXPR_ARG (exp, 1);
4609 tree arg3 = CALL_EXPR_ARG (exp, 2);
4611 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4612 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4614 tree len1 = c_strlen (arg1, 1);
4615 tree len2 = c_strlen (arg2, 1);
4617 location_t loc = EXPR_LOCATION (exp);
4619 if (len1)
4620 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4621 if (len2)
4622 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4624 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4626 /* If we don't have a constant length for the first, use the length
4627 of the second, if we know it. If neither string is constant length,
4628 use the given length argument. We don't require a constant for
4629 this case; some cost analysis could be done if both are available
4630 but neither is constant. For now, assume they're equally cheap,
4631 unless one has side effects. If both strings have constant lengths,
4632 use the smaller. */
4634 if (!len1 && !len2)
4635 len = len3;
4636 else if (!len1)
4637 len = len2;
4638 else if (!len2)
4639 len = len1;
4640 else if (TREE_SIDE_EFFECTS (len1))
4641 len = len2;
4642 else if (TREE_SIDE_EFFECTS (len2))
4643 len = len1;
4644 else if (TREE_CODE (len1) != INTEGER_CST)
4645 len = len2;
4646 else if (TREE_CODE (len2) != INTEGER_CST)
4647 len = len1;
4648 else if (tree_int_cst_lt (len1, len2))
4649 len = len1;
4650 else
4651 len = len2;
4653 /* If we are not using the given length, we must incorporate it here.
4654 The actual new length parameter will be MIN(len,arg3) in this case. */
4655 if (len != len3)
4656 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4657 rtx arg1_rtx = get_memory_rtx (arg1, len);
4658 rtx arg2_rtx = get_memory_rtx (arg2, len);
4659 rtx arg3_rtx = expand_normal (len);
4660 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4661 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4662 MIN (arg1_align, arg2_align));
4664 /* Check to see if the argument was declared attribute nonstring
4665 and if so, issue a warning since at this point it's not known
4666 to be nul-terminated. */
4667 tree fndecl = get_callee_fndecl (exp);
4668 maybe_warn_nonstring_arg (fndecl, exp);
4670 if (result)
4672 /* Return the value in the proper mode for this function. */
4673 mode = TYPE_MODE (TREE_TYPE (exp));
4674 if (GET_MODE (result) == mode)
4675 return result;
4676 if (target == 0)
4677 return convert_to_mode (mode, result, 0);
4678 convert_move (target, result, 0);
4679 return target;
4682 /* Expand the library call ourselves using a stabilized argument
4683 list to avoid re-evaluating the function's arguments twice. */
4684 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4685 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4686 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4687 return expand_call (fn, target, target == const0_rtx);
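/* The length selection above amounts to: compare at most
   MIN (strlen (s) + 1, arg3) bytes, using whichever string has a known
   constant length.  Sketch:

     strncmp (s, "abc", 10);   // known length 3 + 1 == 4, bound 10,
                               // so at most MIN (4, 10) == 4 bytes
                               // are compared
*/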
4690 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4691 if that's convenient. */
4694 rtx expand_builtin_saveregs (void)
4696 rtx val;
4697 rtx_insn *seq;
4699 /* Don't do __builtin_saveregs more than once in a function.
4700 Save the result of the first call and reuse it. */
4701 if (saveregs_value != 0)
4702 return saveregs_value;
4704 /* When this function is called, it means that registers must be
4705 saved on entry to this function. So we migrate the call to the
4706 first insn of this function. */
4708 start_sequence ();
4710 /* Do whatever the machine needs done in this case. */
4711 val = targetm.calls.expand_builtin_saveregs ();
4713 seq = get_insns ();
4714 end_sequence ();
4716 saveregs_value = val;
4718 /* Put the insns after the NOTE that starts the function. If this
4719 is inside a start_sequence, make the outer-level insn chain current, so
4720 the code is placed at the start of the function. */
4721 push_topmost_sequence ();
4722 emit_insn_after (seq, entry_of_function ());
4723 pop_topmost_sequence ();
4725 return val;
4728 /* Expand a call to __builtin_next_arg. */
4730 static rtx
4731 expand_builtin_next_arg (void)
4733 /* Checking arguments is already done in fold_builtin_next_arg
4734 which must be called before this function. */
4735 return expand_binop (ptr_mode, add_optab,
4736 crtl->args.internal_arg_pointer,
4737 crtl->args.arg_offset_rtx,
4738 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4741 /* Make it easier for the backends by protecting the valist argument
4742 from multiple evaluations. */
4744 static tree
4745 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4747 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4749 /* The current way of determining the type of valist is completely
4750 bogus. We should have the information on the va builtin instead. */
4751 if (!vatype)
4752 vatype = targetm.fn_abi_va_list (cfun->decl);
4754 if (TREE_CODE (vatype) == ARRAY_TYPE)
4756 if (TREE_SIDE_EFFECTS (valist))
4757 valist = save_expr (valist);
4759 /* For this case, the backends will be expecting a pointer to
4760 vatype, but it's possible we've actually been given an array
4761 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4762 So fix it. */
4763 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4765 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4766 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4769 else
4771 tree pt = build_pointer_type (vatype);
4773 if (! needs_lvalue)
4775 if (! TREE_SIDE_EFFECTS (valist))
4776 return valist;
4778 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4779 TREE_SIDE_EFFECTS (valist) = 1;
4782 if (TREE_SIDE_EFFECTS (valist))
4783 valist = save_expr (valist);
4784 valist = fold_build2_loc (loc, MEM_REF,
4785 vatype, valist, build_int_cst (pt, 0));
4788 return valist;
4791 /* The "standard" definition of va_list is void*. */
4793 tree
4794 std_build_builtin_va_list (void)
4796 return ptr_type_node;
4799 /* The "standard" abi va_list is va_list_type_node. */
4801 tree
4802 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4804 return va_list_type_node;
4807 /* The "standard" type of va_list is va_list_type_node. */
4809 tree
4810 std_canonical_va_list_type (tree type)
4812 tree wtype, htype;
4814 wtype = va_list_type_node;
4815 htype = type;
4817 if (TREE_CODE (wtype) == ARRAY_TYPE)
4819 /* If va_list is an array type, the argument may have decayed
4820 to a pointer type, e.g. by being passed to another function.
4821 In that case, unwrap both types so that we can compare the
4822 underlying records. */
4823 if (TREE_CODE (htype) == ARRAY_TYPE
4824 || POINTER_TYPE_P (htype))
4826 wtype = TREE_TYPE (wtype);
4827 htype = TREE_TYPE (htype);
4830 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4831 return va_list_type_node;
4833 return NULL_TREE;
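/* The unwrapping above handles ABIs whose va_list is an array type,
   where an argument may have decayed to a pointer.  Sketch (assumes an
   array-typed va_list ABI, as e.g. x86-64 uses):

     void g (va_list ap);   // parameter decays to a pointer to the
                            // underlying record
     void f (va_list ap)
     {
       g (ap);              // both types unwrap to the same record, so
     }                      // std_canonical_va_list_type still matches
*/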
4836 /* The "standard" implementation of va_start: just assign `nextarg' to
4837 the variable. */
4839 void
4840 std_expand_builtin_va_start (tree valist, rtx nextarg)
4842 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4843 convert_move (va_r, nextarg, 0);
4845 /* We do not have any valid bounds for the pointer, so
4846 just store zero bounds for it. */
4847 if (chkp_function_instrumented_p (current_function_decl))
4848 chkp_expand_bounds_reset_for_mem (valist,
4849 make_tree (TREE_TYPE (valist),
4850 nextarg));
4853 /* Expand EXP, a call to __builtin_va_start. */
4855 static rtx
4856 expand_builtin_va_start (tree exp)
4858 rtx nextarg;
4859 tree valist;
4860 location_t loc = EXPR_LOCATION (exp);
4862 if (call_expr_nargs (exp) < 2)
4864 error_at (loc, "too few arguments to function %<va_start%>");
4865 return const0_rtx;
4868 if (fold_builtin_next_arg (exp, true))
4869 return const0_rtx;
4871 nextarg = expand_builtin_next_arg ();
4872 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4874 if (targetm.expand_builtin_va_start)
4875 targetm.expand_builtin_va_start (valist, nextarg);
4876 else
4877 std_expand_builtin_va_start (valist, nextarg);
4879 return const0_rtx;
4882 /* Expand EXP, a call to __builtin_va_end. */
4884 static rtx
4885 expand_builtin_va_end (tree exp)
4887 tree valist = CALL_EXPR_ARG (exp, 0);
4889 /* Evaluate for side effects, if needed. I hate macros that don't
4890 do that. */
4891 if (TREE_SIDE_EFFECTS (valist))
4892 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4894 return const0_rtx;
4897 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4898 builtin rather than just as an assignment in stdarg.h because of the
4899 nastiness of array-type va_list types. */
4901 static rtx
4902 expand_builtin_va_copy (tree exp)
4904 tree dst, src, t;
4905 location_t loc = EXPR_LOCATION (exp);
4907 dst = CALL_EXPR_ARG (exp, 0);
4908 src = CALL_EXPR_ARG (exp, 1);
4910 dst = stabilize_va_list_loc (loc, dst, 1);
4911 src = stabilize_va_list_loc (loc, src, 0);
4913 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4915 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4917 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4918 TREE_SIDE_EFFECTS (t) = 1;
4919 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4921 else
4923 rtx dstb, srcb, size;
4925 /* Evaluate to pointers. */
4926 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4927 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4928 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4929 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4931 dstb = convert_memory_address (Pmode, dstb);
4932 srcb = convert_memory_address (Pmode, srcb);
4934 /* "Dereference" to BLKmode memories. */
4935 dstb = gen_rtx_MEM (BLKmode, dstb);
4936 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4937 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4938 srcb = gen_rtx_MEM (BLKmode, srcb);
4939 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4940 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4942 /* Copy. */
4943 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4946 return const0_rtx;
4949 /* Expand a call to one of the builtin functions __builtin_frame_address or
4950 __builtin_return_address. */
4952 static rtx
4953 expand_builtin_frame_address (tree fndecl, tree exp)
4955 /* The argument must be a nonnegative integer constant.
4956 It counts the number of frames to scan up the stack.
4957 The value is either the frame pointer value or the return
4958 address saved in that frame. */
4959 if (call_expr_nargs (exp) == 0)
4960 /* Warning about missing arg was already issued. */
4961 return const0_rtx;
4962 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4964 error ("invalid argument to %qD", fndecl);
4965 return const0_rtx;
4967 else
4969 /* Number of frames to scan up the stack. */
4970 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4972 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4974 /* Some ports cannot access arbitrary stack frames. */
4975 if (tem == NULL)
4977 warning (0, "unsupported argument to %qD", fndecl);
4978 return const0_rtx;
4981 if (count)
4983 /* Warn since no effort is made to ensure that any frame
4984 beyond the current one exists or can be safely reached. */
4985 warning (OPT_Wframe_address, "calling %qD with "
4986 "a nonzero argument is unsafe", fndecl);
4989 /* For __builtin_frame_address, return what we've got. */
4990 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4991 return tem;
4993 if (!REG_P (tem)
4994 && ! CONSTANT_P (tem))
4995 tem = copy_addr_to_reg (tem);
4996 return tem;
5000 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5001 failed and the caller should emit a normal call. */
5003 static rtx
5004 expand_builtin_alloca (tree exp)
5006 rtx op0;
5007 rtx result;
5008 unsigned int align;
5009 tree fndecl = get_callee_fndecl (exp);
5010 HOST_WIDE_INT max_size;
5011 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5012 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5013 bool valid_arglist
5014 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5015 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5016 VOID_TYPE)
5017 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5018 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5019 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5021 if (!valid_arglist)
5022 return NULL_RTX;
5024 if ((alloca_for_var && !warn_vla_limit)
5025 || (!alloca_for_var && !warn_alloca_limit))
5027 /* -Walloca-larger-than and -Wvla-larger-than settings override
5028 the more general -Walloc-size-larger-than, so unless either of
5029 the former options is specified, check the alloca arguments for
5030 overflow. */
5031 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5032 int idx[] = { 0, -1 };
5033 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5036 /* Compute the argument. */
5037 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5039 /* Compute the alignment. */
5040 align = (fcode == BUILT_IN_ALLOCA
5041 ? BIGGEST_ALIGNMENT
5042 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5044 /* Compute the maximum size. */
5045 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5046 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5047 : -1);
5049 /* Allocate the desired space. If the allocation stems from the declaration
5050 of a variable-sized object, it cannot accumulate. */
5051 result
5052 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5053 result = convert_memory_address (ptr_mode, result);
5055 return result;
5058 /* Emit a call to __asan_allocas_unpoison for EXP.  Replace the second
5059 argument of the call with virtual_stack_dynamic_rtx, because the asan
5060 pass emits a dummy value as the second parameter, relying on this
5061 function to perform the change.  See the motivation in the comment for
5062 the handle_builtin_stack_restore function. */
5064 static rtx
5065 expand_asan_emit_allocas_unpoison (tree exp)
5067 tree arg0 = CALL_EXPR_ARG (exp, 0);
5068 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5069 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5070 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5071 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5072 top, ptr_mode, bot, ptr_mode);
5073 return ret;
5076 /* Expand a call to bswap builtin in EXP.
5077 Return NULL_RTX if a normal call should be emitted rather than expanding the
5078 function in-line. If convenient, the result should be placed in TARGET.
5079 SUBTARGET may be used as the target for computing one of EXP's operands. */
5081 static rtx
5082 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5083 rtx subtarget)
5085 tree arg;
5086 rtx op0;
5088 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5089 return NULL_RTX;
5091 arg = CALL_EXPR_ARG (exp, 0);
5092 op0 = expand_expr (arg,
5093 subtarget && GET_MODE (subtarget) == target_mode
5094 ? subtarget : NULL_RTX,
5095 target_mode, EXPAND_NORMAL);
5096 if (GET_MODE (op0) != target_mode)
5097 op0 = convert_to_mode (target_mode, op0, 1);
5099 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5101 gcc_assert (target);
5103 return convert_to_mode (target_mode, target, 1);
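/* The bswap expansion reverses the byte order within the mode.  A
   concrete value-level example:

     __builtin_bswap32 (0x12345678)   // yields 0x78563412
     __builtin_bswap16 (0x1234)       // yields 0x3412
*/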
5106 /* Expand a call to a unary builtin in EXP.
5107 Return NULL_RTX if a normal call should be emitted rather than expanding the
5108 function in-line. If convenient, the result should be placed in TARGET.
5109 SUBTARGET may be used as the target for computing one of EXP's operands. */
5111 static rtx
5112 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5113 rtx subtarget, optab op_optab)
5115 rtx op0;
5117 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5118 return NULL_RTX;
5120 /* Compute the argument. */
5121 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5122 (subtarget
5123 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5124 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5125 VOIDmode, EXPAND_NORMAL);
5126 /* Compute op, into TARGET if possible.
5127 Set TARGET to wherever the result comes back. */
5128 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5129 op_optab, op0, target, op_optab != clrsb_optab);
5130 gcc_assert (target);
5132 return convert_to_mode (target_mode, target, 0);
5135 /* Expand a call to __builtin_expect. We just return our argument
5136 as the builtin_expect semantics should already have been applied by the
5137 tree branch prediction pass. */
5139 static rtx
5140 expand_builtin_expect (tree exp, rtx target)
5142 tree arg;
5144 if (call_expr_nargs (exp) < 2)
5145 return const0_rtx;
5146 arg = CALL_EXPR_ARG (exp, 0);
5148 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5149 /* When guessing was done, the hints should already have been stripped away. */
5150 gcc_assert (!flag_guess_branch_prob
5151 || optimize == 0 || seen_error ());
5152 return target;
5155 /* Expand a call to __builtin_assume_aligned. We just return our first
5156 argument as the builtin_assume_aligned semantics should already have
5157 been applied by CCP. */
5159 static rtx
5160 expand_builtin_assume_aligned (tree exp, rtx target)
5162 if (call_expr_nargs (exp) < 2)
5163 return const0_rtx;
5164 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5165 EXPAND_NORMAL);
5166 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5167 && (call_expr_nargs (exp) < 3
5168 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5169 return target;
5172 void
5173 expand_builtin_trap (void)
5175 if (targetm.have_trap ())
5177 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5178 /* For trap insns, when not accumulating outgoing args, force a
5179 REG_ARGS_SIZE note to prevent crossjumping of calls with
5180 different arg sizes. */
5181 if (!ACCUMULATE_OUTGOING_ARGS)
5182 add_args_size_note (insn, stack_pointer_delta);
5184 else
5186 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5187 tree call_expr = build_call_expr (fn, 0);
5188 expand_call (call_expr, NULL_RTX, false);
5191 emit_barrier ();
5194 /* Expand a call to __builtin_unreachable. We do nothing except emit
5195 a barrier saying that control flow will not pass here.
5197 It is the responsibility of the program being compiled to ensure
5198 that control flow never reaches __builtin_unreachable. */
5199 static void
5200 expand_builtin_unreachable (void)
5202 emit_barrier ();
5205 /* Expand EXP, a call to fabs, fabsf or fabsl.
5206 Return NULL_RTX if a normal call should be emitted rather than expanding
5207 the function inline. If convenient, the result should be placed
5208 in TARGET. SUBTARGET may be used as the target for computing
5209 the operand. */
5211 static rtx
5212 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5214 machine_mode mode;
5215 tree arg;
5216 rtx op0;
5218 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5219 return NULL_RTX;
5221 arg = CALL_EXPR_ARG (exp, 0);
5222 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5223 mode = TYPE_MODE (TREE_TYPE (arg));
5224 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5225 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5228 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5229 Return NULL if a normal call should be emitted rather than expanding the
5230 function inline. If convenient, the result should be placed in TARGET.
5231 SUBTARGET may be used as the target for computing the operand. */
5233 static rtx
5234 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5236 rtx op0, op1;
5237 tree arg;
5239 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5240 return NULL_RTX;
5242 arg = CALL_EXPR_ARG (exp, 0);
5243 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5245 arg = CALL_EXPR_ARG (exp, 1);
5246 op1 = expand_normal (arg);
5248 return expand_copysign (op0, op1, target);
5251 /* Expand a call to __builtin___clear_cache. */
5253 static rtx
5254 expand_builtin___clear_cache (tree exp)
5256 if (!targetm.code_for_clear_cache)
5258 #ifdef CLEAR_INSN_CACHE
5259 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5260 does something. Just do the default expansion to a call to
5261 __clear_cache(). */
5262 return NULL_RTX;
5263 #else
5264 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5265 does nothing. There is no need to call it. Do nothing. */
5266 return const0_rtx;
5267 #endif /* CLEAR_INSN_CACHE */
5270 /* We have a "clear_cache" insn, and it will handle everything. */
5271 tree begin, end;
5272 rtx begin_rtx, end_rtx;
5274 /* We must not expand to a library call. If we did, any
5275 fallback library function in libgcc that might contain a call to
5276 __builtin___clear_cache() would recurse infinitely. */
5277 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5279 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5280 return const0_rtx;
5283 if (targetm.have_clear_cache ())
5285 struct expand_operand ops[2];
5287 begin = CALL_EXPR_ARG (exp, 0);
5288 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5290 end = CALL_EXPR_ARG (exp, 1);
5291 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5293 create_address_operand (&ops[0], begin_rtx);
5294 create_address_operand (&ops[1], end_rtx);
5295 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5296 return const0_rtx;
5298 return const0_rtx;
5301 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5303 static rtx
5304 round_trampoline_addr (rtx tramp)
5306 rtx temp, addend, mask;
5308 /* If we don't need too much alignment, we'll have been guaranteed
5309 proper alignment by get_trampoline_type. */
5310 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5311 return tramp;
5313 /* Round address up to desired boundary. */
5314 temp = gen_reg_rtx (Pmode);
5315 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5316 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5318 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5319 temp, 0, OPTAB_LIB_WIDEN);
5320 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5321 temp, 0, OPTAB_LIB_WIDEN);
5323 return tramp;
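/* Illustrative addition (not from the original source): the two binops
   above compute the usual round-up-to-alignment sequence

     tramp = (tramp + align - 1) & -align        (align in bytes)

   e.g. with a 16-byte TRAMPOLINE_ALIGNMENT, tramp == 0x1003 becomes
   (0x1003 + 0xf) & ~0xf == 0x1010.  */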
5326 static rtx
5327 expand_builtin_init_trampoline (tree exp, bool onstack)
5329 tree t_tramp, t_func, t_chain;
5330 rtx m_tramp, r_tramp, r_chain, tmp;
5332 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5333 POINTER_TYPE, VOID_TYPE))
5334 return NULL_RTX;
5336 t_tramp = CALL_EXPR_ARG (exp, 0);
5337 t_func = CALL_EXPR_ARG (exp, 1);
5338 t_chain = CALL_EXPR_ARG (exp, 2);
5340 r_tramp = expand_normal (t_tramp);
5341 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5342 MEM_NOTRAP_P (m_tramp) = 1;
5344 /* If ONSTACK, the TRAMP argument should be the address of a field
5345 within the local function's FRAME decl. Either way, let's see if
5346 we can fill in the MEM_ATTRs for this memory. */
5347 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5348 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5350 /* Creator of a heap trampoline is responsible for making sure the
5351 address is aligned to at least STACK_BOUNDARY. Normally malloc
5352 will ensure this anyhow. */
5353 tmp = round_trampoline_addr (r_tramp);
5354 if (tmp != r_tramp)
5356 m_tramp = change_address (m_tramp, BLKmode, tmp);
5357 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5358 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5361 /* The FUNC argument should be the address of the nested function.
5362 Extract the actual function decl to pass to the hook. */
5363 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5364 t_func = TREE_OPERAND (t_func, 0);
5365 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5367 r_chain = expand_normal (t_chain);
5369 /* Generate insns to initialize the trampoline. */
5370 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5372 if (onstack)
5374 trampolines_created = 1;
5376 if (targetm.calls.custom_function_descriptors != 0)
5377 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5378 "trampoline generated for nested function %qD", t_func);
5381 return const0_rtx;
5384 static rtx
5385 expand_builtin_adjust_trampoline (tree exp)
5387 rtx tramp;
5389 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5390 return NULL_RTX;
5392 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5393 tramp = round_trampoline_addr (tramp);
5394 if (targetm.calls.trampoline_adjust_address)
5395 tramp = targetm.calls.trampoline_adjust_address (tramp);
5397 return tramp;
5400 /* Expand a call to the builtin descriptor initialization routine.
5401    A descriptor is made up of a pair of pointers: the static
5402    chain and the code entry, in this order.  */
5404 static rtx
5405 expand_builtin_init_descriptor (tree exp)
5407 tree t_descr, t_func, t_chain;
5408 rtx m_descr, r_descr, r_func, r_chain;
5410 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5411 VOID_TYPE))
5412 return NULL_RTX;
5414 t_descr = CALL_EXPR_ARG (exp, 0);
5415 t_func = CALL_EXPR_ARG (exp, 1);
5416 t_chain = CALL_EXPR_ARG (exp, 2);
5418 r_descr = expand_normal (t_descr);
5419 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5420 MEM_NOTRAP_P (m_descr) = 1;
5422 r_func = expand_normal (t_func);
5423 r_chain = expand_normal (t_chain);
5425 /* Generate insns to initialize the descriptor. */
5426 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5427 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5428 POINTER_SIZE / BITS_PER_UNIT), r_func);
5430 return const0_rtx;
5433 /* Expand a call to the builtin descriptor adjustment routine. */
5435 static rtx
5436 expand_builtin_adjust_descriptor (tree exp)
5438 rtx tramp;
5440 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5441 return NULL_RTX;
5443 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5445 /* Unalign the descriptor to allow runtime identification. */
5446 tramp = plus_constant (ptr_mode, tramp,
5447 targetm.calls.custom_function_descriptors);
5449 return force_operand (tramp, NULL_RTX);
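/* Illustrative addition (my reading of the two routines above, not from
   the original source): after __builtin_init_descriptor the memory is

     offset 0:               static chain value
     offset POINTER_SIZE/8:  code entry point

   and __builtin_adjust_descriptor returns the descriptor's address
   plus the target's custom_function_descriptors value, deliberately
   misaligned so an indirect call can tell descriptors from ordinary
   (aligned) function pointers at run time.  */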
5452 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5453 function. The function first checks whether the back end provides
5454 an insn to implement signbit for the respective mode. If not, it
5455 checks whether the floating point format of the value is such that
5456 the sign bit can be extracted. If that is not the case, error out.
5457 EXP is the expression that is a call to the builtin function; if
5458 convenient, the result should be placed in TARGET. */
5459 static rtx
5460 expand_builtin_signbit (tree exp, rtx target)
5462 const struct real_format *fmt;
5463 scalar_float_mode fmode;
5464 scalar_int_mode rmode, imode;
5465 tree arg;
5466 int word, bitpos;
5467 enum insn_code icode;
5468 rtx temp;
5469 location_t loc = EXPR_LOCATION (exp);
5471 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5472 return NULL_RTX;
5474 arg = CALL_EXPR_ARG (exp, 0);
5475 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5476 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5477 fmt = REAL_MODE_FORMAT (fmode);
5479 arg = builtin_save_expr (arg);
5481   /* Expand the argument yielding an RTX expression. */
5482 temp = expand_normal (arg);
5484 /* Check if the back end provides an insn that handles signbit for the
5485 argument's mode. */
5486 icode = optab_handler (signbit_optab, fmode);
5487 if (icode != CODE_FOR_nothing)
5489 rtx_insn *last = get_last_insn ();
5490 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5491 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5492 return target;
5493 delete_insns_since (last);
5496 /* For floating point formats without a sign bit, implement signbit
5497 as "ARG < 0.0". */
5498 bitpos = fmt->signbit_ro;
5499 if (bitpos < 0)
5501 /* But we can't do this if the format supports signed zero. */
5502 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5504 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5505 build_real (TREE_TYPE (arg), dconst0));
5506 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5509 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5511 imode = int_mode_for_mode (fmode).require ();
5512 temp = gen_lowpart (imode, temp);
5514 else
5516 imode = word_mode;
5517 /* Handle targets with different FP word orders. */
5518 if (FLOAT_WORDS_BIG_ENDIAN)
5519 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5520 else
5521 word = bitpos / BITS_PER_WORD;
5522 temp = operand_subword_force (temp, word, fmode);
5523 bitpos = bitpos % BITS_PER_WORD;
5526 /* Force the intermediate word_mode (or narrower) result into a
5527 register. This avoids attempting to create paradoxical SUBREGs
5528 of floating point modes below. */
5529 temp = force_reg (imode, temp);
5531 /* If the bitpos is within the "result mode" lowpart, the operation
5532    can be implemented with a single bitwise AND.  Otherwise, we need
5533 a right shift and an AND. */
5535 if (bitpos < GET_MODE_BITSIZE (rmode))
5537 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5539 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5540 temp = gen_lowpart (rmode, temp);
5541 temp = expand_binop (rmode, and_optab, temp,
5542 immed_wide_int_const (mask, rmode),
5543 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5545 else
5547 /* Perform a logical right shift to place the signbit in the least
5548 significant bit, then truncate the result to the desired mode
5549 and mask just this bit. */
5550 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5551 temp = gen_lowpart (rmode, temp);
5552 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5553 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5556 return temp;
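/* Illustrative addition (not from the original source), tracing the
   code above for IEEE double (signbit_ro == 63) with a 32-bit int
   result:

     - on a 64-bit target, imode is DImode and 63 >= 32, so the value
       is shifted right by 63 and ANDed with 1;
     - on a 32-bit little-endian target, word 1 is extracted, bitpos
       becomes 31 < 32, and a single AND with the SImode mask 1 << 31
       suffices.  */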
5559 /* Expand fork or exec calls. TARGET is the desired target of the
5560    call.  EXP is the call.  FN is the identifier of the
5561    actual function.  IGNORE is nonzero if the
5562 value is to be ignored. */
5564 static rtx
5565 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5567 tree id, decl;
5568 tree call;
5570 /* If we are not profiling, just call the function. */
5571 if (!profile_arc_flag)
5572 return NULL_RTX;
5574 /* Otherwise call the wrapper.  This should be equivalent for the rest
5575    of the compiler, so the code does not diverge, and the wrapper may run the
5576 code necessary for keeping the profiling sane. */
5578 switch (DECL_FUNCTION_CODE (fn))
5580 case BUILT_IN_FORK:
5581 id = get_identifier ("__gcov_fork");
5582 break;
5584 case BUILT_IN_EXECL:
5585 id = get_identifier ("__gcov_execl");
5586 break;
5588 case BUILT_IN_EXECV:
5589 id = get_identifier ("__gcov_execv");
5590 break;
5592 case BUILT_IN_EXECLP:
5593 id = get_identifier ("__gcov_execlp");
5594 break;
5596 case BUILT_IN_EXECLE:
5597 id = get_identifier ("__gcov_execle");
5598 break;
5600 case BUILT_IN_EXECVP:
5601 id = get_identifier ("__gcov_execvp");
5602 break;
5604 case BUILT_IN_EXECVE:
5605 id = get_identifier ("__gcov_execve");
5606 break;
5608 default:
5609 gcc_unreachable ();
5612 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5613 FUNCTION_DECL, id, TREE_TYPE (fn));
5614 DECL_EXTERNAL (decl) = 1;
5615 TREE_PUBLIC (decl) = 1;
5616 DECL_ARTIFICIAL (decl) = 1;
5617 TREE_NOTHROW (decl) = 1;
5618 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5619 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5620 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5621 return expand_call (call, target, ignore);
5626 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5627 the pointer in these functions is void*, the tree optimizers may remove
5628 casts. The mode computed in expand_builtin isn't reliable either, due
5629 to __sync_bool_compare_and_swap.
5631 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5632 group of builtins. This gives us log2 of the mode size. */
5634 static inline machine_mode
5635 get_builtin_sync_mode (int fcode_diff)
5637 /* The size is not negotiable, so ask not to get BLKmode in return
5638 if the target indicates that a smaller size would be better. */
5639 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
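/* Illustrative addition (not from the original source): FCODE_DIFF
   values 0, 1, 2, 3 and 4 select 8-, 16-, 32-, 64- and 128-bit integer
   modes (QImode through TImode on typical targets), matching the
   _1 .. _16 suffixes of the __sync/__atomic builtins.  */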
5642 /* Expand the memory expression LOC and return the appropriate memory operand
5643 for the builtin_sync operations. */
5645 static rtx
5646 get_builtin_sync_mem (tree loc, machine_mode mode)
5648 rtx addr, mem;
5650 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5651 addr = convert_memory_address (Pmode, addr);
5653 /* Note that we explicitly do not want any alias information for this
5654 memory, so that we kill all other live memories. Otherwise we don't
5655 satisfy the full barrier semantics of the intrinsic. */
5656 mem = validize_mem (gen_rtx_MEM (mode, addr));
5658   /* The memory must be at least as aligned as the mode requires.  */
5659 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5660 get_pointer_alignment (loc)));
5661 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5662 MEM_VOLATILE_P (mem) = 1;
5664 return mem;
5667 /* Make sure an argument is in the right mode.
5668 EXP is the tree argument.
5669 MODE is the mode it should be in. */
5671 static rtx
5672 expand_expr_force_mode (tree exp, machine_mode mode)
5674 rtx val;
5675 machine_mode old_mode;
5677 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5678 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5679 of CONST_INTs, where we know the old_mode only from the call argument. */
5681 old_mode = GET_MODE (val);
5682 if (old_mode == VOIDmode)
5683 old_mode = TYPE_MODE (TREE_TYPE (exp));
5684 val = convert_modes (mode, old_mode, val, 1);
5685 return val;
5689 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5690 EXP is the CALL_EXPR. CODE is the rtx code
5691 that corresponds to the arithmetic or logical operation from the name;
5692 an exception here is that NOT actually means NAND. TARGET is an optional
5693 place for us to store the results; AFTER is true if this is the
5694    xxx_and_fetch form, i.e. the result of the operation is returned.  */
5696 static rtx
5697 expand_builtin_sync_operation (machine_mode mode, tree exp,
5698 enum rtx_code code, bool after,
5699 rtx target)
5701 rtx val, mem;
5702 location_t loc = EXPR_LOCATION (exp);
5704 if (code == NOT && warn_sync_nand)
5706 tree fndecl = get_callee_fndecl (exp);
5707 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5709 static bool warned_f_a_n, warned_n_a_f;
5711 switch (fcode)
5713 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5714 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5715 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5716 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5717 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5718 if (warned_f_a_n)
5719 break;
5721 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5722 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5723 warned_f_a_n = true;
5724 break;
5726 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5727 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5728 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5729 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5730 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5731 if (warned_n_a_f)
5732 break;
5734 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5735 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5736 warned_n_a_f = true;
5737 break;
5739 default:
5740 gcc_unreachable ();
5744 /* Expand the operands. */
5745 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5746 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5748 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5749 after);
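/* Illustrative addition (not from the original source): the post-4.4
   NAND semantics the warning above refers to are

     tmp = *ptr; *ptr = ~(tmp & value); return tmp;   (fetch_and_nand)

   whereas GCC 4.3 and earlier computed *ptr = ~tmp & value.  */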
5752 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5753 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5754 true if this is the boolean form. TARGET is a place for us to store the
5755 results; this is NOT optional if IS_BOOL is true. */
5757 static rtx
5758 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5759 bool is_bool, rtx target)
5761 rtx old_val, new_val, mem;
5762 rtx *pbool, *poval;
5764 /* Expand the operands. */
5765 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5766 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5767 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5769 pbool = poval = NULL;
5770 if (target != const0_rtx)
5772 if (is_bool)
5773 pbool = &target;
5774 else
5775 poval = &target;
5777 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5778 false, MEMMODEL_SYNC_SEQ_CST,
5779 MEMMODEL_SYNC_SEQ_CST))
5780 return NULL_RTX;
5782 return target;
5785 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5786 general form is actually an atomic exchange, and some targets only
5787 support a reduced form with the second argument being a constant 1.
5788 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5789 the results. */
5791 static rtx
5792 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5793 rtx target)
5795 rtx val, mem;
5797 /* Expand the operands. */
5798 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5799 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5801 return expand_sync_lock_test_and_set (target, mem, val);
5804 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5806 static void
5807 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5809 rtx mem;
5811 /* Expand the operands. */
5812 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5814 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
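/* Illustrative addition (not from the original source): the classic
   spinlock built from the two primitives above:

     static int lock;
     while (__sync_lock_test_and_set (&lock, 1))
       ;                      (spin: test_and_set is an acquire barrier)
     ... critical section ...
     __sync_lock_release (&lock);                (release barrier)
*/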
5817 /* Given an integer representing an ``enum memmodel'', verify its
5818 correctness and return the memory model enum. */
5820 static enum memmodel
5821 get_memmodel (tree exp)
5823 rtx op;
5824 unsigned HOST_WIDE_INT val;
5825 source_location loc
5826 = expansion_point_location_if_in_system_header (input_location);
5828 /* If the parameter is not a constant, it's a run time value so we'll just
5829 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5830 if (TREE_CODE (exp) != INTEGER_CST)
5831 return MEMMODEL_SEQ_CST;
5833 op = expand_normal (exp);
5835 val = INTVAL (op);
5836 if (targetm.memmodel_check)
5837 val = targetm.memmodel_check (val);
5838 else if (val & ~MEMMODEL_MASK)
5840 warning_at (loc, OPT_Winvalid_memory_model,
5841 "unknown architecture specifier in memory model to builtin");
5842 return MEMMODEL_SEQ_CST;
5845   /* We should never see a user-explicit SYNC memory model, so >= LAST works.  */
5846 if (memmodel_base (val) >= MEMMODEL_LAST)
5848 warning_at (loc, OPT_Winvalid_memory_model,
5849 "invalid memory model argument to builtin");
5850 return MEMMODEL_SEQ_CST;
5853 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5854 be conservative and promote consume to acquire. */
5855 if (val == MEMMODEL_CONSUME)
5856 val = MEMMODEL_ACQUIRE;
5858 return (enum memmodel) val;
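/* Illustrative addition (not from the original source): the user-visible
   base models are __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1),
   __ATOMIC_ACQUIRE (2), __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4) and
   __ATOMIC_SEQ_CST (5).  Anything else, or a run-time value, is
   conservatively treated as seq-cst above, and consume is promoted to
   acquire.  */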
5861 /* Expand the __atomic_exchange intrinsic:
5862 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5863 EXP is the CALL_EXPR.
5864 TARGET is an optional place for us to store the results. */
5866 static rtx
5867 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5869 rtx val, mem;
5870 enum memmodel model;
5872 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5874 if (!flag_inline_atomics)
5875 return NULL_RTX;
5877 /* Expand the operands. */
5878 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5879 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5881 return expand_atomic_exchange (target, mem, val, model);
5884 /* Expand the __atomic_compare_exchange intrinsic:
5885 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5886 TYPE desired, BOOL weak,
5887 enum memmodel success,
5888 enum memmodel failure)
5889 EXP is the CALL_EXPR.
5890 TARGET is an optional place for us to store the results. */
5892 static rtx
5893 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5894 rtx target)
5896 rtx expect, desired, mem, oldval;
5897 rtx_code_label *label;
5898 enum memmodel success, failure;
5899 tree weak;
5900 bool is_weak;
5901 source_location loc
5902 = expansion_point_location_if_in_system_header (input_location);
5904 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5905 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5907 if (failure > success)
5909 warning_at (loc, OPT_Winvalid_memory_model,
5910 "failure memory model cannot be stronger than success "
5911 "memory model for %<__atomic_compare_exchange%>");
5912 success = MEMMODEL_SEQ_CST;
5915 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5917 warning_at (loc, OPT_Winvalid_memory_model,
5918 "invalid failure memory model for "
5919 "%<__atomic_compare_exchange%>");
5920 failure = MEMMODEL_SEQ_CST;
5921 success = MEMMODEL_SEQ_CST;
5925 if (!flag_inline_atomics)
5926 return NULL_RTX;
5928 /* Expand the operands. */
5929 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5931 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5932 expect = convert_memory_address (Pmode, expect);
5933 expect = gen_rtx_MEM (mode, expect);
5934 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5936 weak = CALL_EXPR_ARG (exp, 3);
5937 is_weak = false;
5938 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5939 is_weak = true;
5941 if (target == const0_rtx)
5942 target = NULL;
5944   /* Lest the rtl backend create a race condition with an improper store
5945 to memory, always create a new pseudo for OLDVAL. */
5946 oldval = NULL;
5948 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5949 is_weak, success, failure))
5950 return NULL_RTX;
5952 /* Conditionally store back to EXPECT, lest we create a race condition
5953 with an improper store to memory. */
5954 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5955 the normal case where EXPECT is totally private, i.e. a register. At
5956 which point the store can be unconditional. */
5957 label = gen_label_rtx ();
5958 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5959 GET_MODE (target), 1, label);
5960 emit_move_insn (expect, oldval);
5961 emit_label (label);
5963 return target;
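/* Illustrative addition (not from the original source): a source-level
   call matching the checks above:

     int expected = 0;
     ok = __atomic_compare_exchange_n (&v, &expected, 1, 0,
                                       __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);

   where the fourth argument is the weak flag.  As enforced above, the
   failure model may be neither RELEASE/ACQ_REL nor stronger than the
   success model.  */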
5966 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5967 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5968 call. The weak parameter must be dropped to match the expected parameter
5969 list and the expected argument changed from value to pointer to memory
5970 slot. */
5972 static void
5973 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5975 unsigned int z;
5976 vec<tree, va_gc> *vec;
5978 vec_alloc (vec, 5);
5979 vec->quick_push (gimple_call_arg (call, 0));
5980 tree expected = gimple_call_arg (call, 1);
5981 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5982 TREE_TYPE (expected));
5983 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5984 if (expd != x)
5985 emit_move_insn (x, expd);
5986 tree v = make_tree (TREE_TYPE (expected), x);
5987 vec->quick_push (build1 (ADDR_EXPR,
5988 build_pointer_type (TREE_TYPE (expected)), v));
5989 vec->quick_push (gimple_call_arg (call, 2));
5990 /* Skip the boolean weak parameter. */
5991 for (z = 4; z < 6; z++)
5992 vec->quick_push (gimple_call_arg (call, z));
5993 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5994 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode));
5995 gcc_assert (bytes_log2 < 5);
5996 built_in_function fncode
5997 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5998 + bytes_log2);
5999 tree fndecl = builtin_decl_explicit (fncode);
6000 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6001 fndecl);
6002 tree exp = build_call_vec (boolean_type_node, fn, vec);
6003 tree lhs = gimple_call_lhs (call);
6004 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6005 if (lhs)
6007 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6008 if (GET_MODE (boolret) != mode)
6009 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6010 x = force_reg (mode, x);
6011 write_complex_part (target, boolret, true);
6012 write_complex_part (target, x, false);
6016 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6018 void
6019 expand_ifn_atomic_compare_exchange (gcall *call)
6021 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6022 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6023 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6024 rtx expect, desired, mem, oldval, boolret;
6025 enum memmodel success, failure;
6026 tree lhs;
6027 bool is_weak;
6028 source_location loc
6029 = expansion_point_location_if_in_system_header (gimple_location (call));
6031 success = get_memmodel (gimple_call_arg (call, 4));
6032 failure = get_memmodel (gimple_call_arg (call, 5));
6034 if (failure > success)
6036 warning_at (loc, OPT_Winvalid_memory_model,
6037 "failure memory model cannot be stronger than success "
6038 "memory model for %<__atomic_compare_exchange%>");
6039 success = MEMMODEL_SEQ_CST;
6042 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6044 warning_at (loc, OPT_Winvalid_memory_model,
6045 "invalid failure memory model for "
6046 "%<__atomic_compare_exchange%>");
6047 failure = MEMMODEL_SEQ_CST;
6048 success = MEMMODEL_SEQ_CST;
6051 if (!flag_inline_atomics)
6053 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6054 return;
6057 /* Expand the operands. */
6058 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6060 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6061 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6063 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6065 boolret = NULL;
6066 oldval = NULL;
6068 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6069 is_weak, success, failure))
6071 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6072 return;
6075 lhs = gimple_call_lhs (call);
6076 if (lhs)
6078 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6079 if (GET_MODE (boolret) != mode)
6080 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6081 write_complex_part (target, boolret, true);
6082 write_complex_part (target, oldval, false);
6086 /* Expand the __atomic_load intrinsic:
6087 TYPE __atomic_load (TYPE *object, enum memmodel)
6088 EXP is the CALL_EXPR.
6089 TARGET is an optional place for us to store the results. */
6091 static rtx
6092 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6094 rtx mem;
6095 enum memmodel model;
6097 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6098 if (is_mm_release (model) || is_mm_acq_rel (model))
6100 source_location loc
6101 = expansion_point_location_if_in_system_header (input_location);
6102 warning_at (loc, OPT_Winvalid_memory_model,
6103 "invalid memory model for %<__atomic_load%>");
6104 model = MEMMODEL_SEQ_CST;
6107 if (!flag_inline_atomics)
6108 return NULL_RTX;
6110 /* Expand the operand. */
6111 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6113 return expand_atomic_load (target, mem, model);
6117 /* Expand the __atomic_store intrinsic:
6118 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6119 EXP is the CALL_EXPR.
6120 TARGET is an optional place for us to store the results. */
6122 static rtx
6123 expand_builtin_atomic_store (machine_mode mode, tree exp)
6125 rtx mem, val;
6126 enum memmodel model;
6128 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6129 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6130 || is_mm_release (model)))
6132 source_location loc
6133 = expansion_point_location_if_in_system_header (input_location);
6134 warning_at (loc, OPT_Winvalid_memory_model,
6135 "invalid memory model for %<__atomic_store%>");
6136 model = MEMMODEL_SEQ_CST;
6139 if (!flag_inline_atomics)
6140 return NULL_RTX;
6142 /* Expand the operands. */
6143 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6144 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6146 return expand_atomic_store (mem, val, model, false);
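/* Illustrative addition (not from the original source): per the check
   above, only relaxed, release and seq-cst are valid on a store, e.g.

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);

   an acquire or acq-rel model here is diagnosed and demoted to
   seq-cst.  */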
6149 /* Expand the __atomic_fetch_XXX intrinsic:
6150 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6151 EXP is the CALL_EXPR.
6152 TARGET is an optional place for us to store the results.
6153    CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6154 FETCH_AFTER is true if returning the result of the operation.
6155 FETCH_AFTER is false if returning the value before the operation.
6156 IGNORE is true if the result is not used.
6157 EXT_CALL is the correct builtin for an external call if this cannot be
6158 resolved to an instruction sequence. */
6160 static rtx
6161 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6162 enum rtx_code code, bool fetch_after,
6163 bool ignore, enum built_in_function ext_call)
6165 rtx val, mem, ret;
6166 enum memmodel model;
6167 tree fndecl;
6168 tree addr;
6170 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6172 /* Expand the operands. */
6173 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6174 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6176 /* Only try generating instructions if inlining is turned on. */
6177 if (flag_inline_atomics)
6179 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6180 if (ret)
6181 return ret;
6184 /* Return if a different routine isn't needed for the library call. */
6185 if (ext_call == BUILT_IN_NONE)
6186 return NULL_RTX;
6188 /* Change the call to the specified function. */
6189 fndecl = get_callee_fndecl (exp);
6190 addr = CALL_EXPR_FN (exp);
6191 STRIP_NOPS (addr);
6193 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6194 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6196   /* If we will emit code after the call, the call cannot be a tail call.
6197 If it is emitted as a tail call, a barrier is emitted after it, and
6198 then all trailing code is removed. */
6199 if (!ignore)
6200 CALL_EXPR_TAILCALL (exp) = 0;
6202 /* Expand the call here so we can emit trailing code. */
6203 ret = expand_call (exp, target, ignore);
6205 /* Replace the original function just in case it matters. */
6206 TREE_OPERAND (addr, 0) = fndecl;
6208 /* Then issue the arithmetic correction to return the right result. */
6209 if (!ignore)
6211 if (code == NOT)
6213 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6214 OPTAB_LIB_WIDEN);
6215 ret = expand_simple_unop (mode, NOT, ret, target, true);
6217 else
6218 ret = expand_simple_binop (mode, code, ret, val, target, true,
6219 OPTAB_LIB_WIDEN);
6221 return ret;
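/* Illustrative addition (not from the original source), tracing the
   fallback above: if, say, __atomic_add_fetch_4 has no inline
   expansion, we call the library __atomic_fetch_add_4 (EXT_CALL)
   instead and correct the result with ret += val; for the NAND forms
   the correction is ret = ~(ret & val), hence the NOT special case.  */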
6224 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6226 void
6227 expand_ifn_atomic_bit_test_and (gcall *call)
6229 tree ptr = gimple_call_arg (call, 0);
6230 tree bit = gimple_call_arg (call, 1);
6231 tree flag = gimple_call_arg (call, 2);
6232 tree lhs = gimple_call_lhs (call);
6233 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6234 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6235 enum rtx_code code;
6236 optab optab;
6237 struct expand_operand ops[5];
6239 gcc_assert (flag_inline_atomics);
6241 if (gimple_call_num_args (call) == 4)
6242 model = get_memmodel (gimple_call_arg (call, 3));
6244 rtx mem = get_builtin_sync_mem (ptr, mode);
6245 rtx val = expand_expr_force_mode (bit, mode);
6247 switch (gimple_call_internal_fn (call))
6249 case IFN_ATOMIC_BIT_TEST_AND_SET:
6250 code = IOR;
6251 optab = atomic_bit_test_and_set_optab;
6252 break;
6253 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6254 code = XOR;
6255 optab = atomic_bit_test_and_complement_optab;
6256 break;
6257 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6258 code = AND;
6259 optab = atomic_bit_test_and_reset_optab;
6260 break;
6261 default:
6262 gcc_unreachable ();
6265 if (lhs == NULL_TREE)
6267 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6268 val, NULL_RTX, true, OPTAB_DIRECT);
6269 if (code == AND)
6270 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6271 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6272 return;
6275 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6276 enum insn_code icode = direct_optab_handler (optab, mode);
6277 gcc_assert (icode != CODE_FOR_nothing);
6278 create_output_operand (&ops[0], target, mode);
6279 create_fixed_operand (&ops[1], mem);
6280 create_convert_operand_to (&ops[2], val, mode, true);
6281 create_integer_operand (&ops[3], model);
6282 create_integer_operand (&ops[4], integer_onep (flag));
6283 if (maybe_expand_insn (icode, 5, ops))
6284 return;
6286 rtx bitval = val;
6287 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6288 val, NULL_RTX, true, OPTAB_DIRECT);
6289 rtx maskval = val;
6290 if (code == AND)
6291 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6292 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6293 code, model, false);
6294 if (integer_onep (flag))
6296 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6297 NULL_RTX, true, OPTAB_DIRECT);
6298 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6299 true, OPTAB_DIRECT);
6301 else
6302 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6303 OPTAB_DIRECT);
6304 if (result != target)
6305 emit_move_insn (target, result);
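/* Illustrative addition (my understanding, not from the original
   source): these internal functions come from gimple-level matching of
   idioms such as

     if (__atomic_fetch_or (&word, 1 << bit, model) & (1 << bit)) ...

   The expander above emits a direct atomic_bit_test_and_* pattern when
   the target provides one and otherwise falls back to the generic
   fetch-op followed by shifting/masking.  */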
6308 /* Expand an atomic clear operation.
6309 void _atomic_clear (BOOL *obj, enum memmodel)
6310 EXP is the call expression. */
6312 static rtx
6313 expand_builtin_atomic_clear (tree exp)
6315 machine_mode mode;
6316 rtx mem, ret;
6317 enum memmodel model;
6319 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6320 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6321 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6323 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6325 source_location loc
6326 = expansion_point_location_if_in_system_header (input_location);
6327 warning_at (loc, OPT_Winvalid_memory_model,
6328 "invalid memory model for %<__atomic_store%>");
6329 model = MEMMODEL_SEQ_CST;
6332 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6333 Failing that, a store is issued by __atomic_store. The only way this can
6334 fail is if the bool type is larger than a word size. Unlikely, but
6335 handle it anyway for completeness. Assume a single threaded model since
6336 there is no atomic support in this case, and no barriers are required. */
6337 ret = expand_atomic_store (mem, const0_rtx, model, true);
6338 if (!ret)
6339 emit_move_insn (mem, const0_rtx);
6340 return const0_rtx;
6343 /* Expand an atomic test_and_set operation.
6344 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6345 EXP is the call expression. */
6347 static rtx
6348 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6350 rtx mem;
6351 enum memmodel model;
6352 machine_mode mode;
6354 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6355 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6356 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6358 return expand_atomic_test_and_set (target, mem, model);
6362 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6363 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6365 static tree
6366 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6368 int size;
6369 machine_mode mode;
6370 unsigned int mode_align, type_align;
6372 if (TREE_CODE (arg0) != INTEGER_CST)
6373 return NULL_TREE;
6375 /* We need a corresponding integer mode for the access to be lock-free. */
6376 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6377 if (!int_mode_for_size (size, 0).exists (&mode))
6378 return boolean_false_node;
6380 mode_align = GET_MODE_ALIGNMENT (mode);
6382 if (TREE_CODE (arg1) == INTEGER_CST)
6384 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6386 /* Either this argument is null, or it's a fake pointer encoding
6387 the alignment of the object. */
6388 val = least_bit_hwi (val);
6389 val *= BITS_PER_UNIT;
6391 if (val == 0 || mode_align < val)
6392 type_align = mode_align;
6393 else
6394 type_align = val;
6396 else
6398 tree ttype = TREE_TYPE (arg1);
6400 /* This function is usually invoked and folded immediately by the front
6401 end before anything else has a chance to look at it. The pointer
6402 parameter at this point is usually cast to a void *, so check for that
6403 and look past the cast. */
6404 if (CONVERT_EXPR_P (arg1)
6405 && POINTER_TYPE_P (ttype)
6406 && VOID_TYPE_P (TREE_TYPE (ttype))
6407 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6408 arg1 = TREE_OPERAND (arg1, 0);
6410 ttype = TREE_TYPE (arg1);
6411 gcc_assert (POINTER_TYPE_P (ttype));
6413 /* Get the underlying type of the object. */
6414 ttype = TREE_TYPE (ttype);
6415 type_align = TYPE_ALIGN (ttype);
6418 /* If the object has smaller alignment, the lock free routines cannot
6419 be used. */
6420 if (type_align < mode_align)
6421 return boolean_false_node;
6423 /* Check if a compare_and_swap pattern exists for the mode which represents
6424 the required size. The pattern is not allowed to fail, so the existence
6425 of the pattern indicates support is present. Also require that an
6426 atomic load exists for the required size. */
6427 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6428 return boolean_true_node;
6429 else
6430 return boolean_false_node;
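/* Illustrative addition (not from the original source): this folding is
   what lets

     __atomic_always_lock_free (sizeof (int), 0)

   evaluate to a compile-time constant; a null pointer argument means
   only the typical alignment for the size is assumed, as described
   above.  */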
6433 /* Return true if the parameters to call EXP represent an object which will
6434 always generate lock free instructions. The first argument represents the
6435 size of the object, and the second parameter is a pointer to the object
6436 itself. If NULL is passed for the object, then the result is based on
6437 typical alignment for an object of the specified size. Otherwise return
6438 false. */
6440 static rtx
6441 expand_builtin_atomic_always_lock_free (tree exp)
6443 tree size;
6444 tree arg0 = CALL_EXPR_ARG (exp, 0);
6445 tree arg1 = CALL_EXPR_ARG (exp, 1);
6447 if (TREE_CODE (arg0) != INTEGER_CST)
6449 error ("non-constant argument 1 to __atomic_always_lock_free");
6450 return const0_rtx;
6453 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6454 if (size == boolean_true_node)
6455 return const1_rtx;
6456 return const0_rtx;
6459 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6460    is lock free on this architecture.  */
6462 static tree
6463 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6465 if (!flag_inline_atomics)
6466 return NULL_TREE;
6468 /* If it isn't always lock free, don't generate a result. */
6469 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6470 return boolean_true_node;
6472 return NULL_TREE;
6475 /* Return true if the parameters to call EXP represent an object which will
6476 always generate lock free instructions. The first argument represents the
6477 size of the object, and the second parameter is a pointer to the object
6478 itself. If NULL is passed for the object, then the result is based on
6479 typical alignment for an object of the specified size. Otherwise return
6480    NULL.  */
6482 static rtx
6483 expand_builtin_atomic_is_lock_free (tree exp)
6485 tree size;
6486 tree arg0 = CALL_EXPR_ARG (exp, 0);
6487 tree arg1 = CALL_EXPR_ARG (exp, 1);
6489 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6491 error ("non-integer argument 1 to __atomic_is_lock_free");
6492 return NULL_RTX;
6495 if (!flag_inline_atomics)
6496 return NULL_RTX;
6498 /* If the value is known at compile time, return the RTX for it. */
6499 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6500 if (size == boolean_true_node)
6501 return const1_rtx;
6503 return NULL_RTX;
6506 /* Expand the __atomic_thread_fence intrinsic:
6507 void __atomic_thread_fence (enum memmodel)
6508 EXP is the CALL_EXPR. */
6510 static void
6511 expand_builtin_atomic_thread_fence (tree exp)
6513 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6514 expand_mem_thread_fence (model);
6517 /* Expand the __atomic_signal_fence intrinsic:
6518 void __atomic_signal_fence (enum memmodel)
6519 EXP is the CALL_EXPR. */
6521 static void
6522 expand_builtin_atomic_signal_fence (tree exp)
6524 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6525 expand_mem_signal_fence (model);
6528 /* Expand the __sync_synchronize intrinsic. */
6530 static void
6531 expand_builtin_sync_synchronize (void)
6533 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6536 static rtx
6537 expand_builtin_thread_pointer (tree exp, rtx target)
6539 enum insn_code icode;
6540 if (!validate_arglist (exp, VOID_TYPE))
6541 return const0_rtx;
6542 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6543 if (icode != CODE_FOR_nothing)
6545 struct expand_operand op;
6546       /* If the target is not suitable then create a new target. */
6547 if (target == NULL_RTX
6548 || !REG_P (target)
6549 || GET_MODE (target) != Pmode)
6550 target = gen_reg_rtx (Pmode);
6551 create_output_operand (&op, target, Pmode);
6552 expand_insn (icode, 1, &op);
6553 return target;
6555 error ("__builtin_thread_pointer is not supported on this target");
6556 return const0_rtx;
6559 static void
6560 expand_builtin_set_thread_pointer (tree exp)
6562 enum insn_code icode;
6563 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6564 return;
6565 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6566 if (icode != CODE_FOR_nothing)
6568 struct expand_operand op;
6569 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6570 Pmode, EXPAND_NORMAL);
6571 create_input_operand (&op, val, Pmode);
6572 expand_insn (icode, 1, &op);
6573 return;
6575 error ("__builtin_set_thread_pointer is not supported on this target");
6579 /* Emit code to restore the current value of stack. */
6581 static void
6582 expand_stack_restore (tree var)
6584 rtx_insn *prev;
6585 rtx sa = expand_normal (var);
6587 sa = convert_memory_address (Pmode, sa);
6589 prev = get_last_insn ();
6590 emit_stack_restore (SAVE_BLOCK, sa);
6592 record_new_stack_level ();
6594 fixup_args_size_notes (prev, get_last_insn (), 0);
6597 /* Emit code to save the current value of stack. */
6599 static rtx
6600 expand_stack_save (void)
6602 rtx ret = NULL_RTX;
6604 emit_stack_save (SAVE_BLOCK, &ret);
6605 return ret;
6609 /* Expand an expression EXP that calls a built-in function,
6610 with result going to TARGET if that's convenient
6611 (and in mode MODE if that's convenient).
6612 SUBTARGET may be used as the target for computing one of EXP's operands.
6613 IGNORE is nonzero if the value is to be ignored. */
6615 rtx
6616 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6617 int ignore)
6619 tree fndecl = get_callee_fndecl (exp);
6620 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6621 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6622 int flags;
6624 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6625 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6627 /* When ASan is enabled, we don't want to expand some memory/string
6628 builtins and rely on libsanitizer's hooks. This allows us to avoid
6629    redundant checks and be sure that a possible overflow will be detected
6630 by ASan. */
6632 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6633 return expand_call (exp, target, ignore);
6635 /* When not optimizing, generate calls to library functions for a certain
6636 set of builtins. */
6637 if (!optimize
6638 && !called_as_built_in (fndecl)
6639 && fcode != BUILT_IN_FORK
6640 && fcode != BUILT_IN_EXECL
6641 && fcode != BUILT_IN_EXECV
6642 && fcode != BUILT_IN_EXECLP
6643 && fcode != BUILT_IN_EXECLE
6644 && fcode != BUILT_IN_EXECVP
6645 && fcode != BUILT_IN_EXECVE
6646 && !ALLOCA_FUNCTION_CODE_P (fcode)
6647 && fcode != BUILT_IN_FREE
6648 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6649 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6650 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6651 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6652 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6653 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6654 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6655 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6656 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6657 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6658 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6659 && fcode != BUILT_IN_CHKP_BNDRET)
6660 return expand_call (exp, target, ignore);
6662 /* The built-in function expanders test for target == const0_rtx
6663 to determine whether the function's result will be ignored. */
6664 if (ignore)
6665 target = const0_rtx;
6667 /* If the result of a pure or const built-in function is ignored, and
6668 none of its arguments are volatile, we can avoid expanding the
6669 built-in call and just evaluate the arguments for side-effects. */
6670 if (target == const0_rtx
6671 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6672 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6674 bool volatilep = false;
6675 tree arg;
6676 call_expr_arg_iterator iter;
6678 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6679 if (TREE_THIS_VOLATILE (arg))
6681 volatilep = true;
6682 break;
6685 if (! volatilep)
6687 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6688 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6689 return const0_rtx;
6693 /* expand_builtin_with_bounds is supposed to be used for
6694 instrumented builtin calls. */
6695 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6697 switch (fcode)
6699 CASE_FLT_FN (BUILT_IN_FABS):
6700 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6701 case BUILT_IN_FABSD32:
6702 case BUILT_IN_FABSD64:
6703 case BUILT_IN_FABSD128:
6704 target = expand_builtin_fabs (exp, target, subtarget);
6705 if (target)
6706 return target;
6707 break;
6709 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6710 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6711 target = expand_builtin_copysign (exp, target, subtarget);
6712 if (target)
6713 return target;
6714 break;
6716 /* Just do a normal library call if we were unable to fold
6717 the values. */
6718 CASE_FLT_FN (BUILT_IN_CABS):
6719 break;
6721 CASE_FLT_FN (BUILT_IN_FMA):
6722 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6723 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6724 if (target)
6725 return target;
6726 break;
6728 CASE_FLT_FN (BUILT_IN_ILOGB):
6729 if (! flag_unsafe_math_optimizations)
6730 break;
6731 gcc_fallthrough ();
6732 CASE_FLT_FN (BUILT_IN_ISINF):
6733 CASE_FLT_FN (BUILT_IN_FINITE):
6734 case BUILT_IN_ISFINITE:
6735 case BUILT_IN_ISNORMAL:
6736 target = expand_builtin_interclass_mathfn (exp, target);
6737 if (target)
6738 return target;
6739 break;
6741 CASE_FLT_FN (BUILT_IN_ICEIL):
6742 CASE_FLT_FN (BUILT_IN_LCEIL):
6743 CASE_FLT_FN (BUILT_IN_LLCEIL):
6744 CASE_FLT_FN (BUILT_IN_LFLOOR):
6745 CASE_FLT_FN (BUILT_IN_IFLOOR):
6746 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6747 target = expand_builtin_int_roundingfn (exp, target);
6748 if (target)
6749 return target;
6750 break;
6752 CASE_FLT_FN (BUILT_IN_IRINT):
6753 CASE_FLT_FN (BUILT_IN_LRINT):
6754 CASE_FLT_FN (BUILT_IN_LLRINT):
6755 CASE_FLT_FN (BUILT_IN_IROUND):
6756 CASE_FLT_FN (BUILT_IN_LROUND):
6757 CASE_FLT_FN (BUILT_IN_LLROUND):
6758 target = expand_builtin_int_roundingfn_2 (exp, target);
6759 if (target)
6760 return target;
6761 break;
6763 CASE_FLT_FN (BUILT_IN_POWI):
6764 target = expand_builtin_powi (exp, target);
6765 if (target)
6766 return target;
6767 break;
6769 CASE_FLT_FN (BUILT_IN_CEXPI):
6770 target = expand_builtin_cexpi (exp, target);
6771 gcc_assert (target);
6772 return target;
6774 CASE_FLT_FN (BUILT_IN_SIN):
6775 CASE_FLT_FN (BUILT_IN_COS):
6776 if (! flag_unsafe_math_optimizations)
6777 break;
6778 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6779 if (target)
6780 return target;
6781 break;
6783 CASE_FLT_FN (BUILT_IN_SINCOS):
6784 if (! flag_unsafe_math_optimizations)
6785 break;
6786 target = expand_builtin_sincos (exp);
6787 if (target)
6788 return target;
6789 break;
6791 case BUILT_IN_APPLY_ARGS:
6792 return expand_builtin_apply_args ();
6794 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6795 FUNCTION with a copy of the parameters described by
6796 ARGUMENTS, and ARGSIZE. It returns a block of memory
6797 allocated on the stack into which is stored all the registers
6798 that might possibly be used for returning the result of a
6799 function. ARGUMENTS is the value returned by
6800 __builtin_apply_args. ARGSIZE is the number of bytes of
6801 arguments that must be copied. ??? How should this value be
6802 computed? We'll also need a safe worst case value for varargs
6803 functions. */
6804 case BUILT_IN_APPLY:
6805 if (!validate_arglist (exp, POINTER_TYPE,
6806 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6807 && !validate_arglist (exp, REFERENCE_TYPE,
6808 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6809 return const0_rtx;
6810 else
6812 rtx ops[3];
6814 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6815 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6816 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6818 return expand_builtin_apply (ops[0], ops[1], ops[2]);
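/* Illustrative addition (not from the original source): the classic way
   these builtins combine to forward a call; real_fn is a placeholder
   and 64 is a guessed worst-case argument size:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) real_fn, args, 64);
     __builtin_return (res);
*/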
6821 /* __builtin_return (RESULT) causes the function to return the
6822 value described by RESULT. RESULT is address of the block of
6823 memory returned by __builtin_apply. */
6824 case BUILT_IN_RETURN:
6825 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6826 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6827 return const0_rtx;
6829 case BUILT_IN_SAVEREGS:
6830 return expand_builtin_saveregs ();
6832 case BUILT_IN_VA_ARG_PACK:
6833 /* All valid uses of __builtin_va_arg_pack () are removed during
6834 inlining. */
6835 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6836 return const0_rtx;
6838 case BUILT_IN_VA_ARG_PACK_LEN:
6839 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6840 inlining. */
6841 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6842 return const0_rtx;
6844 /* Return the address of the first anonymous stack arg. */
6845 case BUILT_IN_NEXT_ARG:
6846 if (fold_builtin_next_arg (exp, false))
6847 return const0_rtx;
6848 return expand_builtin_next_arg ();
6850 case BUILT_IN_CLEAR_CACHE:
6851 target = expand_builtin___clear_cache (exp);
6852 if (target)
6853 return target;
6854 break;
6856 case BUILT_IN_CLASSIFY_TYPE:
6857 return expand_builtin_classify_type (exp);
6859 case BUILT_IN_CONSTANT_P:
6860 return const0_rtx;
6862 case BUILT_IN_FRAME_ADDRESS:
6863 case BUILT_IN_RETURN_ADDRESS:
6864 return expand_builtin_frame_address (fndecl, exp);
6866 /* Returns the address of the area where the structure is returned.
6867 0 otherwise. */
6868 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6869 if (call_expr_nargs (exp) != 0
6870 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6871 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6872 return const0_rtx;
6873 else
6874 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6876 CASE_BUILT_IN_ALLOCA:
6877 target = expand_builtin_alloca (exp);
6878 if (target)
6879 return target;
6880 break;
6882 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6883 return expand_asan_emit_allocas_unpoison (exp);
6885 case BUILT_IN_STACK_SAVE:
6886 return expand_stack_save ();
6888 case BUILT_IN_STACK_RESTORE:
6889 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6890 return const0_rtx;
6892 case BUILT_IN_BSWAP16:
6893 case BUILT_IN_BSWAP32:
6894 case BUILT_IN_BSWAP64:
6895 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6896 if (target)
6897 return target;
6898 break;
6900 CASE_INT_FN (BUILT_IN_FFS):
6901 target = expand_builtin_unop (target_mode, exp, target,
6902 subtarget, ffs_optab);
6903 if (target)
6904 return target;
6905 break;
6907 CASE_INT_FN (BUILT_IN_CLZ):
6908 target = expand_builtin_unop (target_mode, exp, target,
6909 subtarget, clz_optab);
6910 if (target)
6911 return target;
6912 break;
6914 CASE_INT_FN (BUILT_IN_CTZ):
6915 target = expand_builtin_unop (target_mode, exp, target,
6916 subtarget, ctz_optab);
6917 if (target)
6918 return target;
6919 break;
6921 CASE_INT_FN (BUILT_IN_CLRSB):
6922 target = expand_builtin_unop (target_mode, exp, target,
6923 subtarget, clrsb_optab);
6924 if (target)
6925 return target;
6926 break;
6928 CASE_INT_FN (BUILT_IN_POPCOUNT):
6929 target = expand_builtin_unop (target_mode, exp, target,
6930 subtarget, popcount_optab);
6931 if (target)
6932 return target;
6933 break;
6935 CASE_INT_FN (BUILT_IN_PARITY):
6936 target = expand_builtin_unop (target_mode, exp, target,
6937 subtarget, parity_optab);
6938 if (target)
6939 return target;
6940 break;
6942 case BUILT_IN_STRLEN:
6943 target = expand_builtin_strlen (exp, target, target_mode);
6944 if (target)
6945 return target;
6946 break;
6948 case BUILT_IN_STRCAT:
6949 target = expand_builtin_strcat (exp, target);
6950 if (target)
6951 return target;
6952 break;
6954 case BUILT_IN_STRCPY:
6955 target = expand_builtin_strcpy (exp, target);
6956 if (target)
6957 return target;
6958 break;
6960 case BUILT_IN_STRNCAT:
6961 target = expand_builtin_strncat (exp, target);
6962 if (target)
6963 return target;
6964 break;
6966 case BUILT_IN_STRNCPY:
6967 target = expand_builtin_strncpy (exp, target);
6968 if (target)
6969 return target;
6970 break;
6972 case BUILT_IN_STPCPY:
6973 target = expand_builtin_stpcpy (exp, target, mode);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_STPNCPY:
6979 target = expand_builtin_stpncpy (exp, target);
6980 if (target)
6981 return target;
6982 break;
6984 case BUILT_IN_MEMCHR:
6985 target = expand_builtin_memchr (exp, target);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_MEMCPY:
6991 target = expand_builtin_memcpy (exp, target);
6992 if (target)
6993 return target;
6994 break;
6996 case BUILT_IN_MEMMOVE:
6997 target = expand_builtin_memmove (exp, target);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_MEMPCPY:
7003 target = expand_builtin_mempcpy (exp, target);
7004 if (target)
7005 return target;
7006 break;
7008 case BUILT_IN_MEMSET:
7009 target = expand_builtin_memset (exp, target, mode);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_BZERO:
7015 target = expand_builtin_bzero (exp);
7016 if (target)
7017 return target;
7018 break;
7020 case BUILT_IN_STRCMP:
7021 target = expand_builtin_strcmp (exp, target);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_STRNCMP:
7027 target = expand_builtin_strncmp (exp, target, mode);
7028 if (target)
7029 return target;
7030 break;
7032 case BUILT_IN_BCMP:
7033 case BUILT_IN_MEMCMP:
7034 case BUILT_IN_MEMCMP_EQ:
7035 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7036 if (target)
7037 return target;
7038 if (fcode == BUILT_IN_MEMCMP_EQ)
7040 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7041 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7043 break;
7045 case BUILT_IN_SETJMP:
7046 /* This should have been lowered to the builtins below. */
7047 gcc_unreachable ();
7049 case BUILT_IN_SETJMP_SETUP:
7050 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7051 and the receiver label. */
7052 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7054 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7055 VOIDmode, EXPAND_NORMAL);
7056 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7057 rtx_insn *label_r = label_rtx (label);
7059 /* This is copied from the handling of non-local gotos. */
7060 expand_builtin_setjmp_setup (buf_addr, label_r);
7061 nonlocal_goto_handler_labels
7062 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7063 nonlocal_goto_handler_labels);
7064 /* ??? Do not let expand_label treat us as such since we would
7065 not want to be both on the list of non-local labels and on
7066 the list of forced labels. */
7067 FORCED_LABEL (label) = 0;
7068 return const0_rtx;
7070 break;
7072 case BUILT_IN_SETJMP_RECEIVER:
7073 /* __builtin_setjmp_receiver is passed the receiver label. */
7074 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7076 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7077 rtx_insn *label_r = label_rtx (label);
7079 expand_builtin_setjmp_receiver (label_r);
7080 return const0_rtx;
7082 break;
7084 /* __builtin_longjmp is passed a pointer to an array of five words.
7085 It's similar to the C library longjmp function but works with
7086 __builtin_setjmp above. */
7087 case BUILT_IN_LONGJMP:
7088 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7090 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7091 VOIDmode, EXPAND_NORMAL);
7092 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7094 if (value != const1_rtx)
7096 error ("%<__builtin_longjmp%> second argument must be 1");
7097 return const0_rtx;
7100 expand_builtin_longjmp (buf_addr, value);
7101 return const0_rtx;
7103 break;
7105 case BUILT_IN_NONLOCAL_GOTO:
7106 target = expand_builtin_nonlocal_goto (exp);
7107 if (target)
7108 return target;
7109 break;
7111 /* This updates the setjmp buffer that is its argument with the value
7112 of the current stack pointer. */
7113 case BUILT_IN_UPDATE_SETJMP_BUF:
7114 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7116 rtx buf_addr
7117 = expand_normal (CALL_EXPR_ARG (exp, 0));
7119 expand_builtin_update_setjmp_buf (buf_addr);
7120 return const0_rtx;
7122 break;
7124 case BUILT_IN_TRAP:
7125 expand_builtin_trap ();
7126 return const0_rtx;
7128 case BUILT_IN_UNREACHABLE:
7129 expand_builtin_unreachable ();
7130 return const0_rtx;
7132 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7133 case BUILT_IN_SIGNBITD32:
7134 case BUILT_IN_SIGNBITD64:
7135 case BUILT_IN_SIGNBITD128:
7136 target = expand_builtin_signbit (exp, target);
7137 if (target)
7138 return target;
7139 break;
7141 /* Various hooks for the DWARF 2 __throw routine. */
7142 case BUILT_IN_UNWIND_INIT:
7143 expand_builtin_unwind_init ();
7144 return const0_rtx;
7145 case BUILT_IN_DWARF_CFA:
7146 return virtual_cfa_rtx;
7147 #ifdef DWARF2_UNWIND_INFO
7148 case BUILT_IN_DWARF_SP_COLUMN:
7149 return expand_builtin_dwarf_sp_column ();
7150 case BUILT_IN_INIT_DWARF_REG_SIZES:
7151 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7152 return const0_rtx;
7153 #endif
7154 case BUILT_IN_FROB_RETURN_ADDR:
7155 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7156 case BUILT_IN_EXTRACT_RETURN_ADDR:
7157 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7158 case BUILT_IN_EH_RETURN:
7159 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7160 CALL_EXPR_ARG (exp, 1));
7161 return const0_rtx;
7162 case BUILT_IN_EH_RETURN_DATA_REGNO:
7163 return expand_builtin_eh_return_data_regno (exp);
7164 case BUILT_IN_EXTEND_POINTER:
7165 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7166 case BUILT_IN_EH_POINTER:
7167 return expand_builtin_eh_pointer (exp);
7168 case BUILT_IN_EH_FILTER:
7169 return expand_builtin_eh_filter (exp);
7170 case BUILT_IN_EH_COPY_VALUES:
7171 return expand_builtin_eh_copy_values (exp);
7173 case BUILT_IN_VA_START:
7174 return expand_builtin_va_start (exp);
7175 case BUILT_IN_VA_END:
7176 return expand_builtin_va_end (exp);
7177 case BUILT_IN_VA_COPY:
7178 return expand_builtin_va_copy (exp);
7179 case BUILT_IN_EXPECT:
7180 return expand_builtin_expect (exp, target);
7181 case BUILT_IN_ASSUME_ALIGNED:
7182 return expand_builtin_assume_aligned (exp, target);
7183 case BUILT_IN_PREFETCH:
7184 expand_builtin_prefetch (exp);
7185 return const0_rtx;
7187 case BUILT_IN_INIT_TRAMPOLINE:
7188 return expand_builtin_init_trampoline (exp, true);
7189 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7190 return expand_builtin_init_trampoline (exp, false);
7191 case BUILT_IN_ADJUST_TRAMPOLINE:
7192 return expand_builtin_adjust_trampoline (exp);
7194 case BUILT_IN_INIT_DESCRIPTOR:
7195 return expand_builtin_init_descriptor (exp);
7196 case BUILT_IN_ADJUST_DESCRIPTOR:
7197 return expand_builtin_adjust_descriptor (exp);
7199 case BUILT_IN_FORK:
7200 case BUILT_IN_EXECL:
7201 case BUILT_IN_EXECV:
7202 case BUILT_IN_EXECLP:
7203 case BUILT_IN_EXECLE:
7204 case BUILT_IN_EXECVP:
7205 case BUILT_IN_EXECVE:
7206 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7207 if (target)
7208 return target;
7209 break;
7211 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7212 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7213 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7214 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7215 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7216 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7217 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7218 if (target)
7219 return target;
7220 break;
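/* Editor's note (sketch): the _1/_2/_4/_8/_16 suffix is the operand
   size in bytes, so the (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1)
   offset is log2 of that size, which get_builtin_sync_mode turns
   into the integer mode of matching width.  For example, assuming
   a 4-byte int:

     int counter;
     int old = __sync_fetch_and_add (&counter, 1);
     // resolves to BUILT_IN_SYNC_FETCH_AND_ADD_4, expanded in SImode

   The same offset trick is used by all the sized cases below.  */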
7222 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7223 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7224 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7225 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7226 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7227 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7228 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7229 if (target)
7230 return target;
7231 break;
7233 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7234 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7235 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7236 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7237 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7238 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7239 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7240 if (target)
7241 return target;
7242 break;
7244 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7245 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7246 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7247 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7248 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7249 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7250 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7251 if (target)
7252 return target;
7253 break;
7255 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7256 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7257 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7258 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7259 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7260 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7261 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7262 if (target)
7263 return target;
7264 break;
7266 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7267 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7268 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7269 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7270 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7271 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7272 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7273 if (target)
7274 return target;
7275 break;
7277 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7278 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7279 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7280 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7281 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7282 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7283 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7284 if (target)
7285 return target;
7286 break;
7288 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7289 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7290 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7291 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7292 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7293 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7294 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7295 if (target)
7296 return target;
7297 break;
7299 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7300 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7301 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7302 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7303 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7304 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7305 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7306 if (target)
7307 return target;
7308 break;
7310 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7311 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7312 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7313 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7314 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7315 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7316 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7317 if (target)
7318 return target;
7319 break;
7321 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7322 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7323 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7324 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7325 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7326 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7327 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7328 if (target)
7329 return target;
7330 break;
7332 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7333 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7334 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7335 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7336 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7337 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7338 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7339 if (target)
7340 return target;
7341 break;
7343 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7344 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7345 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7346 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7347 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7348 if (mode == VOIDmode)
7349 mode = TYPE_MODE (boolean_type_node);
7350 if (!target || !register_operand (target, mode))
7351 target = gen_reg_rtx (mode);
7353 mode = get_builtin_sync_mode
7354 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7355 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7356 if (target)
7357 return target;
7358 break;
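/* Editor's note (sketch): the "bool" form returns only a success
   flag, hence the boolean-mode TARGET prepared above, while the
   "val" form below returns the prior contents:

     long v;
     bool ok  = __sync_bool_compare_and_swap (&v, oldval, newval);
     long was = __sync_val_compare_and_swap (&v, oldval, newval);
   */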
7360 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7361 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7362 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7363 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7364 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7365 mode = get_builtin_sync_mode
7366 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7367 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7368 if (target)
7369 return target;
7370 break;
7372 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7373 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7374 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7375 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7376 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7378 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7379 if (target)
7380 return target;
7381 break;
7383 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7384 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7385 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7386 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7387 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7389 expand_builtin_sync_lock_release (mode, exp);
7390 return const0_rtx;
7392 case BUILT_IN_SYNC_SYNCHRONIZE:
7393 expand_builtin_sync_synchronize ();
7394 return const0_rtx;
7396 case BUILT_IN_ATOMIC_EXCHANGE_1:
7397 case BUILT_IN_ATOMIC_EXCHANGE_2:
7398 case BUILT_IN_ATOMIC_EXCHANGE_4:
7399 case BUILT_IN_ATOMIC_EXCHANGE_8:
7400 case BUILT_IN_ATOMIC_EXCHANGE_16:
7401 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7402 target = expand_builtin_atomic_exchange (mode, exp, target);
7403 if (target)
7404 return target;
7405 break;
7407 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7408 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7409 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7410 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7411 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7413 unsigned int nargs, z;
7414 vec<tree, va_gc> *vec;
7416 mode =
7417 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7418 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7419 if (target)
7420 return target;
7422 /* If this is turned into an external library call, the weak parameter
7423 must be dropped to match the expected parameter list. */
7424 nargs = call_expr_nargs (exp);
7425 vec_alloc (vec, nargs - 1);
7426 for (z = 0; z < 3; z++)
7427 vec->quick_push (CALL_EXPR_ARG (exp, z));
7428 /* Skip the boolean weak parameter. */
7429 for (z = 4; z < 6; z++)
7430 vec->quick_push (CALL_EXPR_ARG (exp, z));
7431 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7432 break;
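/* Editor's sketch of the rewrite above: the sized builtin carries a
   "weak" flag (argument index 3) that the external library routine
   does not, so arguments 0-2 and 4-5 are kept and index 3 dropped:

     __atomic_compare_exchange_n (ptr, expected, desired,
                                  weak, success_mo, failure_mo)
     ==>  __atomic_compare_exchange_N (ptr, expected, desired,
                                       success_mo, failure_mo)

   with N the operand size in bytes.  */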
7435 case BUILT_IN_ATOMIC_LOAD_1:
7436 case BUILT_IN_ATOMIC_LOAD_2:
7437 case BUILT_IN_ATOMIC_LOAD_4:
7438 case BUILT_IN_ATOMIC_LOAD_8:
7439 case BUILT_IN_ATOMIC_LOAD_16:
7440 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7441 target = expand_builtin_atomic_load (mode, exp, target);
7442 if (target)
7443 return target;
7444 break;
7446 case BUILT_IN_ATOMIC_STORE_1:
7447 case BUILT_IN_ATOMIC_STORE_2:
7448 case BUILT_IN_ATOMIC_STORE_4:
7449 case BUILT_IN_ATOMIC_STORE_8:
7450 case BUILT_IN_ATOMIC_STORE_16:
7451 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7452 target = expand_builtin_atomic_store (mode, exp);
7453 if (target)
7454 return const0_rtx;
7455 break;
7457 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7458 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7459 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7460 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7461 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7463 enum built_in_function lib;
7464 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7465 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7466 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7467 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7468 ignore, lib);
7469 if (target)
7470 return target;
7471 break;
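/* Editor's note (sketch): when no native add-fetch pattern exists,
   the fetch-add routine named in LIB can stand in because

     __atomic_add_fetch (p, v, mo) == __atomic_fetch_add (p, v, mo) + v

   and expand_builtin_atomic_fetch_op applies that compensation when
   it falls back to LIB.  The sub/and/nand/xor/or cases below pair up
   with their fetch-op counterparts the same way.  */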
7473 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7474 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7475 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7476 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7477 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7479 enum built_in_function lib;
7480 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7481 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7482 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7483 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7484 ignore, lib);
7485 if (target)
7486 return target;
7487 break;
7489 case BUILT_IN_ATOMIC_AND_FETCH_1:
7490 case BUILT_IN_ATOMIC_AND_FETCH_2:
7491 case BUILT_IN_ATOMIC_AND_FETCH_4:
7492 case BUILT_IN_ATOMIC_AND_FETCH_8:
7493 case BUILT_IN_ATOMIC_AND_FETCH_16:
7495 enum built_in_function lib;
7496 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7497 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7498 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7499 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7500 ignore, lib);
7501 if (target)
7502 return target;
7503 break;
7505 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7506 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7507 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7508 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7509 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7511 enum built_in_function lib;
7512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7513 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7514 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7515 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7516 ignore, lib);
7517 if (target)
7518 return target;
7519 break;
7521 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7522 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7523 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7524 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7525 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7527 enum built_in_function lib;
7528 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7529 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7530 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7531 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7532 ignore, lib);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_ATOMIC_OR_FETCH_1:
7538 case BUILT_IN_ATOMIC_OR_FETCH_2:
7539 case BUILT_IN_ATOMIC_OR_FETCH_4:
7540 case BUILT_IN_ATOMIC_OR_FETCH_8:
7541 case BUILT_IN_ATOMIC_OR_FETCH_16:
7543 enum built_in_function lib;
7544 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7545 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7546 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7547 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7548 ignore, lib);
7549 if (target)
7550 return target;
7551 break;
7553 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7554 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7555 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7556 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7557 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7558 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7559 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7560 ignore, BUILT_IN_NONE);
7561 if (target)
7562 return target;
7563 break;
7565 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7566 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7567 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7568 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7569 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7570 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7571 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7572 ignore, BUILT_IN_NONE);
7573 if (target)
7574 return target;
7575 break;
7577 case BUILT_IN_ATOMIC_FETCH_AND_1:
7578 case BUILT_IN_ATOMIC_FETCH_AND_2:
7579 case BUILT_IN_ATOMIC_FETCH_AND_4:
7580 case BUILT_IN_ATOMIC_FETCH_AND_8:
7581 case BUILT_IN_ATOMIC_FETCH_AND_16:
7582 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7583 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7584 ignore, BUILT_IN_NONE);
7585 if (target)
7586 return target;
7587 break;
7589 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7590 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7591 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7592 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7593 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7594 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7595 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7596 ignore, BUILT_IN_NONE);
7597 if (target)
7598 return target;
7599 break;
7601 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7602 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7603 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7604 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7605 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7607 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7608 ignore, BUILT_IN_NONE);
7609 if (target)
7610 return target;
7611 break;
7613 case BUILT_IN_ATOMIC_FETCH_OR_1:
7614 case BUILT_IN_ATOMIC_FETCH_OR_2:
7615 case BUILT_IN_ATOMIC_FETCH_OR_4:
7616 case BUILT_IN_ATOMIC_FETCH_OR_8:
7617 case BUILT_IN_ATOMIC_FETCH_OR_16:
7618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7619 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7620 ignore, BUILT_IN_NONE);
7621 if (target)
7622 return target;
7623 break;
7625 case BUILT_IN_ATOMIC_TEST_AND_SET:
7626 return expand_builtin_atomic_test_and_set (exp, target);
7628 case BUILT_IN_ATOMIC_CLEAR:
7629 return expand_builtin_atomic_clear (exp);
7631 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7632 return expand_builtin_atomic_always_lock_free (exp);
7634 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7635 target = expand_builtin_atomic_is_lock_free (exp);
7636 if (target)
7637 return target;
7638 break;
7640 case BUILT_IN_ATOMIC_THREAD_FENCE:
7641 expand_builtin_atomic_thread_fence (exp);
7642 return const0_rtx;
7644 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7645 expand_builtin_atomic_signal_fence (exp);
7646 return const0_rtx;
7648 case BUILT_IN_OBJECT_SIZE:
7649 return expand_builtin_object_size (exp);
7651 case BUILT_IN_MEMCPY_CHK:
7652 case BUILT_IN_MEMPCPY_CHK:
7653 case BUILT_IN_MEMMOVE_CHK:
7654 case BUILT_IN_MEMSET_CHK:
7655 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7656 if (target)
7657 return target;
7658 break;
7660 case BUILT_IN_STRCPY_CHK:
7661 case BUILT_IN_STPCPY_CHK:
7662 case BUILT_IN_STRNCPY_CHK:
7663 case BUILT_IN_STPNCPY_CHK:
7664 case BUILT_IN_STRCAT_CHK:
7665 case BUILT_IN_STRNCAT_CHK:
7666 case BUILT_IN_SNPRINTF_CHK:
7667 case BUILT_IN_VSNPRINTF_CHK:
7668 maybe_emit_chk_warning (exp, fcode);
7669 break;
7671 case BUILT_IN_SPRINTF_CHK:
7672 case BUILT_IN_VSPRINTF_CHK:
7673 maybe_emit_sprintf_chk_warning (exp, fcode);
7674 break;
7676 case BUILT_IN_FREE:
7677 if (warn_free_nonheap_object)
7678 maybe_emit_free_warning (exp);
7679 break;
7681 case BUILT_IN_THREAD_POINTER:
7682 return expand_builtin_thread_pointer (exp, target);
7684 case BUILT_IN_SET_THREAD_POINTER:
7685 expand_builtin_set_thread_pointer (exp);
7686 return const0_rtx;
7688 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7689 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7690 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7691 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7692 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7693 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7694 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7695 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7696 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7697 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7698 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7699 /* We allow user CHKP builtins if Pointer Bounds
7700 Checker is off. */
7701 if (!chkp_function_instrumented_p (current_function_decl))
7703 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7704 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7705 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7706 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7707 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7708 return expand_normal (CALL_EXPR_ARG (exp, 0));
7709 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7710 return expand_normal (size_zero_node);
7711 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7712 return expand_normal (size_int (-1));
7713 else
7714 return const0_rtx;
7716 /* FALLTHROUGH */
7718 case BUILT_IN_CHKP_BNDMK:
7719 case BUILT_IN_CHKP_BNDSTX:
7720 case BUILT_IN_CHKP_BNDCL:
7721 case BUILT_IN_CHKP_BNDCU:
7722 case BUILT_IN_CHKP_BNDLDX:
7723 case BUILT_IN_CHKP_BNDRET:
7724 case BUILT_IN_CHKP_INTERSECT:
7725 case BUILT_IN_CHKP_NARROW:
7726 case BUILT_IN_CHKP_EXTRACT_LOWER:
7727 case BUILT_IN_CHKP_EXTRACT_UPPER:
7728 /* A software implementation of Pointer Bounds Checker is not yet
7729 implemented; target support is required. */
7730 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7731 break;
7733 case BUILT_IN_ACC_ON_DEVICE:
7734 /* Do a library call if we failed to expand the builtin when
7735 folding. */
7736 break;
7738 default: /* Just do a library call for any unknown builtin. */
7739 break;
7742 /* The switch statement above can drop through to cause the function
7743 to be called normally. */
7744 return expand_call (exp, target, ignore);
7747 /* Similar to expand_builtin but is used for instrumented calls. */
7750 expand_builtin_with_bounds (tree exp, rtx target,
7751 rtx subtarget ATTRIBUTE_UNUSED,
7752 machine_mode mode, int ignore)
7754 tree fndecl = get_callee_fndecl (exp);
7755 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7757 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7759 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7760 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7762 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7763 && fcode < END_CHKP_BUILTINS);
7765 switch (fcode)
7767 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7768 target = expand_builtin_memcpy_with_bounds (exp, target);
7769 if (target)
7770 return target;
7771 break;
7773 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7774 target = expand_builtin_mempcpy_with_bounds (exp, target);
7775 if (target)
7776 return target;
7777 break;
7779 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7780 target = expand_builtin_memset_with_bounds (exp, target, mode);
7781 if (target)
7782 return target;
7783 break;
7785 case BUILT_IN_MEMCPY_CHKP:
7786 case BUILT_IN_MEMMOVE_CHKP:
7787 case BUILT_IN_MEMPCPY_CHKP:
7788 if (call_expr_nargs (exp) > 3)
7790 /* memcpy_chkp (void *dst, size_t dstbnd,
7791 const void *src, size_t srcbnd, size_t n)
7792 and others take a pointer bound argument just after each
7793 pointer argument. */
7794 tree dest = CALL_EXPR_ARG (exp, 0);
7795 tree src = CALL_EXPR_ARG (exp, 2);
7796 tree len = CALL_EXPR_ARG (exp, 4);
7798 check_memop_access (exp, dest, src, len);
7799 break;
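/* Editor's note (sketch): in the instrumented signature above the
   argument indexes are 0 = dst, 1 = dstbnd, 2 = src, 3 = srcbnd,
   4 = n, so reading arguments 0, 2 and 4 hands check_memop_access
   exactly what it would see for a plain memcpy (dst, src, n).  */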
7802 default:
7803 break;
7806 /* The switch statement above can drop through to cause the function
7807 to be called normally. */
7808 return expand_call (exp, target, ignore);
7811 /* Determine whether a tree node represents a call to a built-in
7812 function. If the tree T is a call to a built-in function with
7813 the right number of arguments of the appropriate types, return
7814 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7815 Otherwise the return value is END_BUILTINS. */
7817 enum built_in_function
7818 builtin_mathfn_code (const_tree t)
7820 const_tree fndecl, arg, parmlist;
7821 const_tree argtype, parmtype;
7822 const_call_expr_arg_iterator iter;
7824 if (TREE_CODE (t) != CALL_EXPR
7825 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7826 return END_BUILTINS;
7828 fndecl = get_callee_fndecl (t);
7829 if (fndecl == NULL_TREE
7830 || TREE_CODE (fndecl) != FUNCTION_DECL
7831 || ! DECL_BUILT_IN (fndecl)
7832 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7833 return END_BUILTINS;
7835 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7836 init_const_call_expr_arg_iterator (t, &iter);
7837 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7839 /* If a function doesn't take a variable number of arguments,
7840 the last element in the list will have type `void'. */
7841 parmtype = TREE_VALUE (parmlist);
7842 if (VOID_TYPE_P (parmtype))
7844 if (more_const_call_expr_args_p (&iter))
7845 return END_BUILTINS;
7846 return DECL_FUNCTION_CODE (fndecl);
7849 if (! more_const_call_expr_args_p (&iter))
7850 return END_BUILTINS;
7852 arg = next_const_call_expr_arg (&iter);
7853 argtype = TREE_TYPE (arg);
7855 if (SCALAR_FLOAT_TYPE_P (parmtype))
7857 if (! SCALAR_FLOAT_TYPE_P (argtype))
7858 return END_BUILTINS;
7860 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7862 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7863 return END_BUILTINS;
7865 else if (POINTER_TYPE_P (parmtype))
7867 if (! POINTER_TYPE_P (argtype))
7868 return END_BUILTINS;
7870 else if (INTEGRAL_TYPE_P (parmtype))
7872 if (! INTEGRAL_TYPE_P (argtype))
7873 return END_BUILTINS;
7875 else
7876 return END_BUILTINS;
7879 /* Variable-length argument list. */
7880 return DECL_FUNCTION_CODE (fndecl);
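/* Editor's sketch of builtin_mathfn_code's type checking (hypothetical
   calls, described at the source level):

     sqrt (2.0)        => BUILT_IN_SQRT
     sqrt (2.0, 3.0)   => END_BUILTINS  (too many arguments)
     sqrt (some_ptr)   => END_BUILTINS  (argument not a real type)

   so callers may rely on the argument shapes once it succeeds.  */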
7883 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7884 evaluate to a constant. */
7886 static tree
7887 fold_builtin_constant_p (tree arg)
7889 /* We return 1 for a numeric type that's known to be a constant
7890 value at compile-time or for an aggregate type that's a
7891 literal constant. */
7892 STRIP_NOPS (arg);
7894 /* If we know this is a constant, return the constant one. */
7895 if (CONSTANT_CLASS_P (arg)
7896 || (TREE_CODE (arg) == CONSTRUCTOR
7897 && TREE_CONSTANT (arg)))
7898 return integer_one_node;
7899 if (TREE_CODE (arg) == ADDR_EXPR)
7901 tree op = TREE_OPERAND (arg, 0);
7902 if (TREE_CODE (op) == STRING_CST
7903 || (TREE_CODE (op) == ARRAY_REF
7904 && integer_zerop (TREE_OPERAND (op, 1))
7905 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7906 return integer_one_node;
7909 /* If this expression has side effects, show we don't know it to be a
7910 constant. Likewise if it's a pointer or aggregate type, since in
7911 those cases we only want literals, which are only optimized
7912 when generating RTL, not later.
7913 And finally, if we are compiling an initializer, not code, we
7914 need to return a definite result now; there's not going to be any
7915 more optimization done. */
7916 if (TREE_SIDE_EFFECTS (arg)
7917 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7918 || POINTER_TYPE_P (TREE_TYPE (arg))
7919 || cfun == 0
7920 || folding_initializer
7921 || force_folding_builtin_constant_p)
7922 return integer_zero_node;
7924 return NULL_TREE;
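/* Editor's sketch of the outcomes above:

     __builtin_constant_p (42)     -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (side effects)
     __builtin_constant_p (x)      -> NULL_TREE: left unfolded so that
                                      later optimization may still
                                      prove the argument constant.  */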
7927 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7928 return it as a truthvalue. */
7930 static tree
7931 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7932 tree predictor)
7934 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7936 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7937 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7938 ret_type = TREE_TYPE (TREE_TYPE (fn));
7939 pred_type = TREE_VALUE (arg_types);
7940 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7942 pred = fold_convert_loc (loc, pred_type, pred);
7943 expected = fold_convert_loc (loc, expected_type, expected);
7944 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7945 predictor);
7947 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7948 build_int_cst (ret_type, 0));
7951 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7952 NULL_TREE if no simplification is possible. */
7954 tree
7955 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7957 tree inner, fndecl, inner_arg0;
7958 enum tree_code code;
7960 /* Distribute the expected value over short-circuiting operators.
7961 See through the cast from truthvalue_type_node to long. */
7962 inner_arg0 = arg0;
7963 while (CONVERT_EXPR_P (inner_arg0)
7964 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7965 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7966 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7968 /* If this is a builtin_expect within a builtin_expect, keep the
7969 inner one. See through a comparison against a constant. It
7970 might have been added to create a truthvalue. */
7971 inner = inner_arg0;
7973 if (COMPARISON_CLASS_P (inner)
7974 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7975 inner = TREE_OPERAND (inner, 0);
7977 if (TREE_CODE (inner) == CALL_EXPR
7978 && (fndecl = get_callee_fndecl (inner))
7979 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7980 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7981 return arg0;
7983 inner = inner_arg0;
7984 code = TREE_CODE (inner);
7985 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7987 tree op0 = TREE_OPERAND (inner, 0);
7988 tree op1 = TREE_OPERAND (inner, 1);
7990 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7991 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7992 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7994 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7997 /* If the argument isn't invariant then there's nothing else we can do. */
7998 if (!TREE_CONSTANT (inner_arg0))
7999 return NULL_TREE;
8001 /* If we expect that a comparison against the argument will fold to
8002 a constant return the constant. In practice, this means a true
8003 constant or the address of a non-weak symbol. */
8004 inner = inner_arg0;
8005 STRIP_NOPS (inner);
8006 if (TREE_CODE (inner) == ADDR_EXPR)
8010 inner = TREE_OPERAND (inner, 0);
8012 while (TREE_CODE (inner) == COMPONENT_REF
8013 || TREE_CODE (inner) == ARRAY_REF);
8014 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8015 return NULL_TREE;
8018 /* Otherwise, ARG0 already has the proper type for the return value. */
8019 return arg0;
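/* Editor's sketch of the short-circuit distribution above:

     __builtin_expect (a && b, 1)
   becomes
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the TRUTH_ANDIF_EXPR carries its own prediction.  */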
8022 /* Fold a call to __builtin_classify_type with argument ARG. */
8024 static tree
8025 fold_builtin_classify_type (tree arg)
8027 if (arg == 0)
8028 return build_int_cst (integer_type_node, no_type_class);
8030 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8033 /* Fold a call to __builtin_strlen with argument ARG. */
8035 static tree
8036 fold_builtin_strlen (location_t loc, tree type, tree arg)
8038 if (!validate_arg (arg, POINTER_TYPE))
8039 return NULL_TREE;
8040 else
8042 tree len = c_strlen (arg, 0);
8044 if (len)
8045 return fold_convert_loc (loc, type, len);
8047 return NULL_TREE;
8051 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8053 static tree
8054 fold_builtin_inf (location_t loc, tree type, int warn)
8056 REAL_VALUE_TYPE real;
8058 /* __builtin_inff is intended to be usable to define INFINITY on all
8059 targets. If an infinity is not available, INFINITY expands "to a
8060 positive constant of type float that overflows at translation
8061 time", footnote "In this case, using INFINITY will violate the
8062 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8063 Thus we pedwarn to ensure this constraint violation is
8064 diagnosed. */
8065 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8066 pedwarn (loc, 0, "target format does not support infinity");
8068 real_inf (&real);
8069 return build_real (type, real);
8072 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8073 NULL_TREE if no simplification can be made. */
8075 static tree
8076 fold_builtin_sincos (location_t loc,
8077 tree arg0, tree arg1, tree arg2)
8079 tree type;
8080 tree fndecl, call = NULL_TREE;
8082 if (!validate_arg (arg0, REAL_TYPE)
8083 || !validate_arg (arg1, POINTER_TYPE)
8084 || !validate_arg (arg2, POINTER_TYPE))
8085 return NULL_TREE;
8087 type = TREE_TYPE (arg0);
8089 /* Calculate the result when the argument is a constant. */
8090 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8091 if (fn == END_BUILTINS)
8092 return NULL_TREE;
8094 /* Canonicalize sincos to cexpi. */
8095 if (TREE_CODE (arg0) == REAL_CST)
8097 tree complex_type = build_complex_type (type);
8098 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8100 if (!call)
8102 if (!targetm.libc_has_function (function_c99_math_complex)
8103 || !builtin_decl_implicit_p (fn))
8104 return NULL_TREE;
8105 fndecl = builtin_decl_explicit (fn);
8106 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8107 call = builtin_save_expr (call);
8110 return build2 (COMPOUND_EXPR, void_type_node,
8111 build2 (MODIFY_EXPR, void_type_node,
8112 build_fold_indirect_ref_loc (loc, arg1),
8113 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8114 build2 (MODIFY_EXPR, void_type_node,
8115 build_fold_indirect_ref_loc (loc, arg2),
8116 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8119 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8120 Return NULL_TREE if no simplification can be made. */
8122 static tree
8123 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8125 if (!validate_arg (arg1, POINTER_TYPE)
8126 || !validate_arg (arg2, POINTER_TYPE)
8127 || !validate_arg (len, INTEGER_TYPE))
8128 return NULL_TREE;
8130 /* If the LEN parameter is zero, return zero. */
8131 if (integer_zerop (len))
8132 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8133 arg1, arg2);
8135 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8136 if (operand_equal_p (arg1, arg2, 0))
8137 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8139 /* If the LEN parameter is one, return an expression corresponding to
8140 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8141 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8143 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8144 tree cst_uchar_ptr_node
8145 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8147 tree ind1
8148 = fold_convert_loc (loc, integer_type_node,
8149 build1 (INDIRECT_REF, cst_uchar_node,
8150 fold_convert_loc (loc,
8151 cst_uchar_ptr_node,
8152 arg1)));
8153 tree ind2
8154 = fold_convert_loc (loc, integer_type_node,
8155 build1 (INDIRECT_REF, cst_uchar_node,
8156 fold_convert_loc (loc,
8157 cst_uchar_ptr_node,
8158 arg2)));
8159 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8162 return NULL_TREE;
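/* Editor's worked example of the one-byte fold above:

     memcmp (p, q, 1)
       ==>  (int) *(const unsigned char *) p
            - (int) *(const unsigned char *) q

   which honors memcmp's sign contract without a library call.  */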
8165 /* Fold a call to builtin isascii with argument ARG. */
8167 static tree
8168 fold_builtin_isascii (location_t loc, tree arg)
8170 if (!validate_arg (arg, INTEGER_TYPE))
8171 return NULL_TREE;
8172 else
8174 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8175 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8176 build_int_cst (integer_type_node,
8177 ~ (unsigned HOST_WIDE_INT) 0x7f));
8178 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8179 arg, integer_zero_node);
8183 /* Fold a call to builtin toascii with argument ARG. */
8185 static tree
8186 fold_builtin_toascii (location_t loc, tree arg)
8188 if (!validate_arg (arg, INTEGER_TYPE))
8189 return NULL_TREE;
8191 /* Transform toascii(c) -> (c & 0x7f). */
8192 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8193 build_int_cst (integer_type_node, 0x7f));
8196 /* Fold a call to builtin isdigit with argument ARG. */
8198 static tree
8199 fold_builtin_isdigit (location_t loc, tree arg)
8201 if (!validate_arg (arg, INTEGER_TYPE))
8202 return NULL_TREE;
8203 else
8205 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8206 /* According to the C standard, isdigit is unaffected by locale.
8207 However, it definitely is affected by the target character set. */
8208 unsigned HOST_WIDE_INT target_digit0
8209 = lang_hooks.to_target_charset ('0');
8211 if (target_digit0 == 0)
8212 return NULL_TREE;
8214 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8215 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8216 build_int_cst (unsigned_type_node, target_digit0));
8217 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8218 build_int_cst (unsigned_type_node, 9));
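/* Editor's worked example for the three ctype folds above, assuming
   an ASCII target character set:

     isascii (200)  ==>  (200 & ~0x7f) == 0   ->  0x80 != 0  ->  false
     toascii (200)  ==>  200 & 0x7f           ->  72  ('H')
     isdigit ('5')  ==>  (unsigned) '5' - '0' <= 9  ->  5 <= 9  ->  true

   The unsigned subtraction turns the two-sided digit range test into
   a single comparison.  */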
8222 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8224 static tree
8225 fold_builtin_fabs (location_t loc, tree arg, tree type)
8227 if (!validate_arg (arg, REAL_TYPE))
8228 return NULL_TREE;
8230 arg = fold_convert_loc (loc, type, arg);
8231 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8234 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8236 static tree
8237 fold_builtin_abs (location_t loc, tree arg, tree type)
8239 if (!validate_arg (arg, INTEGER_TYPE))
8240 return NULL_TREE;
8242 arg = fold_convert_loc (loc, type, arg);
8243 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8246 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8248 static tree
8249 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8251 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8252 if (validate_arg (arg0, REAL_TYPE)
8253 && validate_arg (arg1, REAL_TYPE)
8254 && validate_arg (arg2, REAL_TYPE)
8255 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8256 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8258 return NULL_TREE;
8261 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8263 static tree
8264 fold_builtin_carg (location_t loc, tree arg, tree type)
8266 if (validate_arg (arg, COMPLEX_TYPE)
8267 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8269 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8271 if (atan2_fn)
8273 tree new_arg = builtin_save_expr (arg);
8274 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8275 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8276 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8280 return NULL_TREE;
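/* Editor's worked example: carg (-1.0 + 0.0i) folds to
   atan2 (0.0, -1.0), i.e. pi, with IMAGPART/REALPART_EXPR pulling
   B and A out of the saved complex argument.  */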
8283 /* Fold a call to builtin frexp, we can assume the base is 2. */
8285 static tree
8286 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8288 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8289 return NULL_TREE;
8291 STRIP_NOPS (arg0);
8293 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8294 return NULL_TREE;
8296 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8298 /* Proceed if a valid pointer type was passed in. */
8299 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8301 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8302 tree frac, exp;
8304 switch (value->cl)
8306 case rvc_zero:
8307 /* For +-0, return (*exp = 0, +-0). */
8308 exp = integer_zero_node;
8309 frac = arg0;
8310 break;
8311 case rvc_nan:
8312 case rvc_inf:
8313 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8314 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8315 case rvc_normal:
8317 /* Since the frexp function always expects base 2, and in
8318 GCC normalized significands are already in the range
8319 [0.5, 1.0), we have exactly what frexp wants. */
8320 REAL_VALUE_TYPE frac_rvt = *value;
8321 SET_REAL_EXP (&frac_rvt, 0);
8322 frac = build_real (rettype, frac_rvt);
8323 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8325 break;
8326 default:
8327 gcc_unreachable ();
8330 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8331 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8332 TREE_SIDE_EFFECTS (arg1) = 1;
8333 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8336 return NULL_TREE;
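/* Editor's worked example for the rvc_normal branch above: 8.0 is
   0.5 * 2^4 in GCC's normalized form, so

     frexp (8.0, &e)  ==>  (*e = 4, 0.5)

   built as the COMPOUND_EXPR shown.  */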
8339 /* Fold a call to builtin modf. */
8341 static tree
8342 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8344 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8345 return NULL_TREE;
8347 STRIP_NOPS (arg0);
8349 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8350 return NULL_TREE;
8352 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8354 /* Proceed if a valid pointer type was passed in. */
8355 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8357 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8358 REAL_VALUE_TYPE trunc, frac;
8360 switch (value->cl)
8362 case rvc_nan:
8363 case rvc_zero:
8364 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8365 trunc = frac = *value;
8366 break;
8367 case rvc_inf:
8368 /* For +-Inf, return (*arg1 = arg0, +-0). */
8369 frac = dconst0;
8370 frac.sign = value->sign;
8371 trunc = *value;
8372 break;
8373 case rvc_normal:
8374 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8375 real_trunc (&trunc, VOIDmode, value);
8376 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8377 /* If the original number was negative and already
8378 integral, then the fractional part is -0.0. */
8379 if (value->sign && frac.cl == rvc_zero)
8380 frac.sign = value->sign;
8381 break;
8384 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8385 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8386 build_real (rettype, trunc));
8387 TREE_SIDE_EFFECTS (arg1) = 1;
8388 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8389 build_real (rettype, frac));
8392 return NULL_TREE;
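/* Editor's worked examples for the branches above:

     modf (-3.5, &i)  ==>  (*i = -3.0, -0.5)   rvc_normal
     modf (-3.0, &i)  ==>  (*i = -3.0, -0.0)   negative integral input
     modf (inf, &i)   ==>  (*i = inf, 0.0)     rvc_inf

   The -0.0 fraction comes from the sign fix-up after the
   subtraction.  */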
8395 /* Given a location LOC, an interclass builtin function decl FNDECL
8396 and its single argument ARG, return a folded expression computing
8397 the same, or NULL_TREE if we either couldn't or didn't want to fold
8398 (the latter happens if there's an RTL instruction available). */
8400 static tree
8401 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8403 machine_mode mode;
8405 if (!validate_arg (arg, REAL_TYPE))
8406 return NULL_TREE;
8408 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8409 return NULL_TREE;
8411 mode = TYPE_MODE (TREE_TYPE (arg));
8413 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8415 /* If there is no optab, try generic code. */
8416 switch (DECL_FUNCTION_CODE (fndecl))
8418 tree result;
8420 CASE_FLT_FN (BUILT_IN_ISINF):
8422 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8423 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8424 tree type = TREE_TYPE (arg);
8425 REAL_VALUE_TYPE r;
8426 char buf[128];
8428 if (is_ibm_extended)
8430 /* NaN and Inf are encoded in the high-order double value
8431 only. The low-order value is not significant. */
8432 type = double_type_node;
8433 mode = DFmode;
8434 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8436 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8437 real_from_string (&r, buf);
8438 result = build_call_expr (isgr_fn, 2,
8439 fold_build1_loc (loc, ABS_EXPR, type, arg),
8440 build_real (type, r));
8441 return result;
8443 CASE_FLT_FN (BUILT_IN_FINITE):
8444 case BUILT_IN_ISFINITE:
8446 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8447 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8448 tree type = TREE_TYPE (arg);
8449 REAL_VALUE_TYPE r;
8450 char buf[128];
8452 if (is_ibm_extended)
8454 /* NaN and Inf are encoded in the high-order double value
8455 only. The low-order value is not significant. */
8456 type = double_type_node;
8457 mode = DFmode;
8458 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8460 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8461 real_from_string (&r, buf);
8462 result = build_call_expr (isle_fn, 2,
8463 fold_build1_loc (loc, ABS_EXPR, type, arg),
8464 build_real (type, r));
8465 /*result = fold_build2_loc (loc, UNGT_EXPR,
8466 TREE_TYPE (TREE_TYPE (fndecl)),
8467 fold_build1_loc (loc, ABS_EXPR, type, arg),
8468 build_real (type, r));
8469 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8470 TREE_TYPE (TREE_TYPE (fndecl)),
8471 result);*/
8472 return result;
8474 case BUILT_IN_ISNORMAL:
8476 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8477 islessequal(fabs(x),DBL_MAX). */
8478 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8479 tree type = TREE_TYPE (arg);
8480 tree orig_arg, max_exp, min_exp;
8481 machine_mode orig_mode = mode;
8482 REAL_VALUE_TYPE rmax, rmin;
8483 char buf[128];
8485 orig_arg = arg = builtin_save_expr (arg);
8486 if (is_ibm_extended)
8488 /* Use double to test the normal range of IBM extended
8489 precision. Emin for IBM extended precision is
8490 different to emin for IEEE double, being 53 higher
8491 since the low double exponent is at least 53 lower
8492 than the high double exponent. */
8493 type = double_type_node;
8494 mode = DFmode;
8495 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8497 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8499 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8500 real_from_string (&rmax, buf);
8501 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8502 real_from_string (&rmin, buf);
8503 max_exp = build_real (type, rmax);
8504 min_exp = build_real (type, rmin);
8506 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8507 if (is_ibm_extended)
8509 /* Testing the high end of the range is done just using
8510 the high double, using the same test as isfinite().
8511 For the subnormal end of the range we first test the
8512 high double, then if its magnitude is equal to the
8513 limit of 0x1p-969, we test whether the low double is
8514 non-zero and opposite sign to the high double. */
8515 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8516 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8517 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8518 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8519 arg, min_exp);
8520 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8521 complex_double_type_node, orig_arg);
8522 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8523 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8524 tree zero = build_real (type, dconst0);
8525 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8526 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8527 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8528 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8529 fold_build3 (COND_EXPR,
8530 integer_type_node,
8531 hilt, logt, lolt));
8532 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8533 eq_min, ok_lo);
8534 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8535 gt_min, eq_min);
8537 else
8539 tree const isge_fn
8540 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8541 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8543 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8544 max_exp, min_exp);
8545 return result;
8547 default:
8548 break;
8551 return NULL_TREE;
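/* Editor's sketch of the generic expansions above, for IEEE double:

     isinf (x)     ==>  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ==>  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ==>  isgreaterequal (fabs (x), 0x1p-1022)
                        & islessequal (fabs (x), DBL_MAX)

   The unordered comparison builtins quietly return false for NaN
   operands instead of raising an invalid-operand exception.  */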
8554 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8555 ARG is the argument for the call. */
8557 static tree
8558 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8560 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8562 if (!validate_arg (arg, REAL_TYPE))
8563 return NULL_TREE;
8565 switch (builtin_index)
8567 case BUILT_IN_ISINF:
8568 if (!HONOR_INFINITIES (arg))
8569 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8571 return NULL_TREE;
8573 case BUILT_IN_ISINF_SIGN:
8575 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8576 /* In a boolean context, GCC will fold the inner COND_EXPR to
8577 1. So e.g. "if (isinf_sign(x))" would be folded to just
8578 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8579 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8580 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8581 tree tmp = NULL_TREE;
8583 arg = builtin_save_expr (arg);
8585 if (signbit_fn && isinf_fn)
8587 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8588 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8590 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8591 signbit_call, integer_zero_node);
8592 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8593 isinf_call, integer_zero_node);
8595 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8596 integer_minus_one_node, integer_one_node);
8597 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8598 isinf_call, tmp,
8599 integer_zero_node);
8602 return tmp;
8605 case BUILT_IN_ISFINITE:
8606 if (!HONOR_NANS (arg)
8607 && !HONOR_INFINITIES (arg))
8608 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8610 return NULL_TREE;
8612 case BUILT_IN_ISNAN:
8613 if (!HONOR_NANS (arg))
8614 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8617 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8618 if (is_ibm_extended)
8620 /* NaN and Inf are encoded in the high-order double value
8621 only. The low-order value is not significant. */
8622 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8625 arg = builtin_save_expr (arg);
8626 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8628 default:
8629 gcc_unreachable ();
8633 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8634 This builtin will generate code to return the appropriate floating
8635 point classification depending on the value of the floating point
8636 number passed in. The possible return values must be supplied as
8637 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8638 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8639 one floating point argument which is "type generic". */
8641 static tree
8642 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8644 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8645 arg, type, res, tmp;
8646 machine_mode mode;
8647 REAL_VALUE_TYPE r;
8648 char buf[128];
8650 /* Verify the required arguments in the original call. */
8651 if (nargs != 6
8652 || !validate_arg (args[0], INTEGER_TYPE)
8653 || !validate_arg (args[1], INTEGER_TYPE)
8654 || !validate_arg (args[2], INTEGER_TYPE)
8655 || !validate_arg (args[3], INTEGER_TYPE)
8656 || !validate_arg (args[4], INTEGER_TYPE)
8657 || !validate_arg (args[5], REAL_TYPE))
8658 return NULL_TREE;
8660 fp_nan = args[0];
8661 fp_infinite = args[1];
8662 fp_normal = args[2];
8663 fp_subnormal = args[3];
8664 fp_zero = args[4];
8665 arg = args[5];
8666 type = TREE_TYPE (arg);
8667 mode = TYPE_MODE (type);
8668 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8670 /* fpclassify(x) ->
8671 isnan(x) ? FP_NAN :
8672 (fabs(x) == Inf ? FP_INFINITE :
8673 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8674 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8676 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8677 build_real (type, dconst0));
8678 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8679 tmp, fp_zero, fp_subnormal);
8681 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8682 real_from_string (&r, buf);
8683 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8684 arg, build_real (type, r));
8685 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8687 if (HONOR_INFINITIES (mode))
8689 real_inf (&r);
8690 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8691 build_real (type, r));
8692 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8693 fp_infinite, res);
8696 if (HONOR_NANS (mode))
8698 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8699 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8702 return res;
8705 /* Fold a call to an unordered comparison function such as
8706 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8707 being called and ARG0 and ARG1 are the arguments for the call.
8708 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8709 the opposite of the desired result. UNORDERED_CODE is used
8710 for modes that can hold NaNs and ORDERED_CODE is used for
8711 the rest. */
8713 static tree
8714 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8715 enum tree_code unordered_code,
8716 enum tree_code ordered_code)
8718 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8719 enum tree_code code;
8720 tree type0, type1;
8721 enum tree_code code0, code1;
8722 tree cmp_type = NULL_TREE;
8724 type0 = TREE_TYPE (arg0);
8725 type1 = TREE_TYPE (arg1);
8727 code0 = TREE_CODE (type0);
8728 code1 = TREE_CODE (type1);
8730 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8731 /* Choose the wider of two real types. */
8732 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8733 ? type0 : type1;
8734 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8735 cmp_type = type0;
8736 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8737 cmp_type = type1;
8739 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8740 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8742 if (unordered_code == UNORDERED_EXPR)
8744 if (!HONOR_NANS (arg0))
8745 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8746 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8749 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8750 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8751 fold_build2_loc (loc, code, type, arg0, arg1));
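/* Editor's sketch: for a NaN-honoring mode the fold above yields

     isgreater (x, y)  ==>  !(x unle y)     (TRUTH_NOT of UNLE_EXPR)

   and, when NaNs cannot occur, the ordered code instead:

     isgreater (x, y)  ==>  !(x <= y)

   The UNLE form never raises invalid on quiet NaNs, unlike a plain
   x > y.  */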
8754 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow either into normal
8755 arithmetic if it can never overflow, or into internal functions that
8756 return both the result of the arithmetic and a boolean overflow flag
8757 in a complex integer result, or into some other overflow check.
8758 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the
8759 overflow-checking part of that. */
8761 static tree
8762 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8763 tree arg0, tree arg1, tree arg2)
8765 enum internal_fn ifn = IFN_LAST;
8766 /* The code of the expression corresponding to the type-generic
8767 built-in, or ERROR_MARK for the type-specific ones. */
8768 enum tree_code opcode = ERROR_MARK;
8769 bool ovf_only = false;
8771 switch (fcode)
8773 case BUILT_IN_ADD_OVERFLOW_P:
8774 ovf_only = true;
8775 /* FALLTHRU */
8776 case BUILT_IN_ADD_OVERFLOW:
8777 opcode = PLUS_EXPR;
8778 /* FALLTHRU */
8779 case BUILT_IN_SADD_OVERFLOW:
8780 case BUILT_IN_SADDL_OVERFLOW:
8781 case BUILT_IN_SADDLL_OVERFLOW:
8782 case BUILT_IN_UADD_OVERFLOW:
8783 case BUILT_IN_UADDL_OVERFLOW:
8784 case BUILT_IN_UADDLL_OVERFLOW:
8785 ifn = IFN_ADD_OVERFLOW;
8786 break;
8787 case BUILT_IN_SUB_OVERFLOW_P:
8788 ovf_only = true;
8789 /* FALLTHRU */
8790 case BUILT_IN_SUB_OVERFLOW:
8791 opcode = MINUS_EXPR;
8792 /* FALLTHRU */
8793 case BUILT_IN_SSUB_OVERFLOW:
8794 case BUILT_IN_SSUBL_OVERFLOW:
8795 case BUILT_IN_SSUBLL_OVERFLOW:
8796 case BUILT_IN_USUB_OVERFLOW:
8797 case BUILT_IN_USUBL_OVERFLOW:
8798 case BUILT_IN_USUBLL_OVERFLOW:
8799 ifn = IFN_SUB_OVERFLOW;
8800 break;
8801 case BUILT_IN_MUL_OVERFLOW_P:
8802 ovf_only = true;
8803 /* FALLTHRU */
8804 case BUILT_IN_MUL_OVERFLOW:
8805 opcode = MULT_EXPR;
8806 /* FALLTHRU */
8807 case BUILT_IN_SMUL_OVERFLOW:
8808 case BUILT_IN_SMULL_OVERFLOW:
8809 case BUILT_IN_SMULLL_OVERFLOW:
8810 case BUILT_IN_UMUL_OVERFLOW:
8811 case BUILT_IN_UMULL_OVERFLOW:
8812 case BUILT_IN_UMULLL_OVERFLOW:
8813 ifn = IFN_MUL_OVERFLOW;
8814 break;
8815 default:
8816 gcc_unreachable ();
8819 /* For the "generic" overloads, the first two arguments can have different
8820 types and the last argument determines the target type to use to check
8821 for overflow. The arguments of the other overloads all have the same
8822 type. */
8823 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8825 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8826 arguments are constant, attempt to fold the built-in call into a constant
8827 expression indicating whether or not it detected an overflow. */
8828 if (ovf_only
8829 && TREE_CODE (arg0) == INTEGER_CST
8830 && TREE_CODE (arg1) == INTEGER_CST)
8831 /* Perform the computation in the target type and check for overflow. */
8832 return omit_one_operand_loc (loc, boolean_type_node,
8833 arith_overflowed_p (opcode, type, arg0, arg1)
8834 ? boolean_true_node : boolean_false_node,
8835 arg2);
8837 tree ctype = build_complex_type (type);
8838 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8839 2, arg0, arg1);
8840 tree tgt = save_expr (call);
8841 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8842 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8843 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8845 if (ovf_only)
8846 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8848 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8849 tree store
8850 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8851 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
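/* Editor's sketch of the non-_p folding above:

     __builtin_add_overflow (a, b, &res)
   becomes, in effect,

     c = .ADD_OVERFLOW (a, b);     // complex: REALPART = sum,
     res = REALPART_EXPR (c);      //          IMAGPART = overflow flag
     (bool) IMAGPART_EXPR (c)

   while the _p forms keep only the final boolean.  */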
8854 /* Fold a call to __builtin_FILE to a constant string. */
8856 static inline tree
8857 fold_builtin_FILE (location_t loc)
8859 if (const char *fname = LOCATION_FILE (loc))
8860 return build_string_literal (strlen (fname) + 1, fname);
8862 return build_string_literal (1, "");
8865 /* Fold a call to __builtin_FUNCTION to a constant string. */
8867 static inline tree
8868 fold_builtin_FUNCTION ()
8870 const char *name = "";
8872 if (current_function_decl)
8873 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8875 return build_string_literal (strlen (name) + 1, name);
8878 /* Fold a call to __builtin_LINE to an integer constant. */
8880 static inline tree
8881 fold_builtin_LINE (location_t loc, tree type)
8883 return build_int_cst (type, LOCATION_LINE (loc));
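/* A sketch of the effect (assuming a call at line 42 of foo.c inside a
   function bar; the names here are hypothetical):

     __builtin_FILE ()     -> "foo.c"
     __builtin_FUNCTION () -> "bar"
     __builtin_LINE ()     -> 42

   each folded to a compile-time constant.  */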
8886 /* Fold a call to built-in function FNDECL with 0 arguments.
8887 This function returns NULL_TREE if no simplification was possible. */
8889 static tree
8890 fold_builtin_0 (location_t loc, tree fndecl)
8892 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8893 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8894 switch (fcode)
8896 case BUILT_IN_FILE:
8897 return fold_builtin_FILE (loc);
8899 case BUILT_IN_FUNCTION:
8900 return fold_builtin_FUNCTION ();
8902 case BUILT_IN_LINE:
8903 return fold_builtin_LINE (loc, type);
8905 CASE_FLT_FN (BUILT_IN_INF):
8906 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8907 case BUILT_IN_INFD32:
8908 case BUILT_IN_INFD64:
8909 case BUILT_IN_INFD128:
8910 return fold_builtin_inf (loc, type, true);
8912 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8913 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8914 return fold_builtin_inf (loc, type, false);
8916 case BUILT_IN_CLASSIFY_TYPE:
8917 return fold_builtin_classify_type (NULL_TREE);
8919 default:
8920 break;
8922 return NULL_TREE;
8925 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8926 This function returns NULL_TREE if no simplification was possible. */
8928 static tree
8929 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8931 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8932 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8934 if (TREE_CODE (arg0) == ERROR_MARK)
8935 return NULL_TREE;
8937 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8938 return ret;
8940 switch (fcode)
8942 case BUILT_IN_CONSTANT_P:
8944 tree val = fold_builtin_constant_p (arg0);
8946 /* Gimplification will pull the CALL_EXPR for the builtin out of
8947 an if condition. When not optimizing, we'll not CSE it back.
8948 To avoid regressions such as link errors, return false now. */
8949 if (!val && !optimize)
8950 val = integer_zero_node;
8952 return val;
8955 case BUILT_IN_CLASSIFY_TYPE:
8956 return fold_builtin_classify_type (arg0);
8958 case BUILT_IN_STRLEN:
8959 return fold_builtin_strlen (loc, type, arg0);
8961 CASE_FLT_FN (BUILT_IN_FABS):
8962 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8963 case BUILT_IN_FABSD32:
8964 case BUILT_IN_FABSD64:
8965 case BUILT_IN_FABSD128:
8966 return fold_builtin_fabs (loc, arg0, type);
8968 case BUILT_IN_ABS:
8969 case BUILT_IN_LABS:
8970 case BUILT_IN_LLABS:
8971 case BUILT_IN_IMAXABS:
8972 return fold_builtin_abs (loc, arg0, type);
8974 CASE_FLT_FN (BUILT_IN_CONJ):
8975 if (validate_arg (arg0, COMPLEX_TYPE)
8976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8977 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8978 break;
8980 CASE_FLT_FN (BUILT_IN_CREAL):
8981 if (validate_arg (arg0, COMPLEX_TYPE)
8982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8983 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8984 break;
8986 CASE_FLT_FN (BUILT_IN_CIMAG):
8987 if (validate_arg (arg0, COMPLEX_TYPE)
8988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8989 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8990 break;
8992 CASE_FLT_FN (BUILT_IN_CARG):
8993 return fold_builtin_carg (loc, arg0, type);
8995 case BUILT_IN_ISASCII:
8996 return fold_builtin_isascii (loc, arg0);
8998 case BUILT_IN_TOASCII:
8999 return fold_builtin_toascii (loc, arg0);
9001 case BUILT_IN_ISDIGIT:
9002 return fold_builtin_isdigit (loc, arg0);
9004 CASE_FLT_FN (BUILT_IN_FINITE):
9005 case BUILT_IN_FINITED32:
9006 case BUILT_IN_FINITED64:
9007 case BUILT_IN_FINITED128:
9008 case BUILT_IN_ISFINITE:
9010 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9011 if (ret)
9012 return ret;
9013 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9016 CASE_FLT_FN (BUILT_IN_ISINF):
9017 case BUILT_IN_ISINFD32:
9018 case BUILT_IN_ISINFD64:
9019 case BUILT_IN_ISINFD128:
9021 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9022 if (ret)
9023 return ret;
9024 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9027 case BUILT_IN_ISNORMAL:
9028 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9030 case BUILT_IN_ISINF_SIGN:
9031 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9033 CASE_FLT_FN (BUILT_IN_ISNAN):
9034 case BUILT_IN_ISNAND32:
9035 case BUILT_IN_ISNAND64:
9036 case BUILT_IN_ISNAND128:
9037 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9039 case BUILT_IN_FREE:
9040 if (integer_zerop (arg0))
9041 return build_empty_stmt (loc);
9042 break;
9044 default:
9045 break;
9048 return NULL_TREE;
9052 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9053 This function returns NULL_TREE if no simplification was possible. */
9055 static tree
9056 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9058 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9059 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9061 if (TREE_CODE (arg0) == ERROR_MARK
9062 || TREE_CODE (arg1) == ERROR_MARK)
9063 return NULL_TREE;
9065 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9066 return ret;
9068 switch (fcode)
9070 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9071 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9072 if (validate_arg (arg0, REAL_TYPE)
9073 && validate_arg (arg1, POINTER_TYPE))
9074 return do_mpfr_lgamma_r (arg0, arg1, type);
9075 break;
9077 CASE_FLT_FN (BUILT_IN_FREXP):
9078 return fold_builtin_frexp (loc, arg0, arg1, type);
9080 CASE_FLT_FN (BUILT_IN_MODF):
9081 return fold_builtin_modf (loc, arg0, arg1, type);
9083 case BUILT_IN_STRSPN:
9084 return fold_builtin_strspn (loc, arg0, arg1);
9086 case BUILT_IN_STRCSPN:
9087 return fold_builtin_strcspn (loc, arg0, arg1);
9089 case BUILT_IN_STRPBRK:
9090 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9092 case BUILT_IN_EXPECT:
9093 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9095 case BUILT_IN_ISGREATER:
9096 return fold_builtin_unordered_cmp (loc, fndecl,
9097 arg0, arg1, UNLE_EXPR, LE_EXPR);
9098 case BUILT_IN_ISGREATEREQUAL:
9099 return fold_builtin_unordered_cmp (loc, fndecl,
9100 arg0, arg1, UNLT_EXPR, LT_EXPR);
9101 case BUILT_IN_ISLESS:
9102 return fold_builtin_unordered_cmp (loc, fndecl,
9103 arg0, arg1, UNGE_EXPR, GE_EXPR);
9104 case BUILT_IN_ISLESSEQUAL:
9105 return fold_builtin_unordered_cmp (loc, fndecl,
9106 arg0, arg1, UNGT_EXPR, GT_EXPR);
9107 case BUILT_IN_ISLESSGREATER:
9108 return fold_builtin_unordered_cmp (loc, fndecl,
9109 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9110 case BUILT_IN_ISUNORDERED:
9111 return fold_builtin_unordered_cmp (loc, fndecl,
9112 arg0, arg1, UNORDERED_EXPR,
9113 NOP_EXPR);
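/* Schematically, fold_builtin_unordered_cmp negates the complementary
   comparison: e.g. __builtin_isgreater (x, y) becomes roughly
   !(x UNLE y) when the mode honors NaNs, and !(x <= y) otherwise,
   matching the UNLE_EXPR/LE_EXPR pair passed above.  */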
9115 /* We do the folding for va_start in the expander. */
9116 case BUILT_IN_VA_START:
9117 break;
9119 case BUILT_IN_OBJECT_SIZE:
9120 return fold_builtin_object_size (arg0, arg1);
9122 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9123 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9125 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9126 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9128 default:
9129 break;
9131 return NULL_TREE;
9134 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9135 and ARG2.
9136 This function returns NULL_TREE if no simplification was possible. */
9138 static tree
9139 fold_builtin_3 (location_t loc, tree fndecl,
9140 tree arg0, tree arg1, tree arg2)
9142 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9143 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9145 if (TREE_CODE (arg0) == ERROR_MARK
9146 || TREE_CODE (arg1) == ERROR_MARK
9147 || TREE_CODE (arg2) == ERROR_MARK)
9148 return NULL_TREE;
9150 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9151 arg0, arg1, arg2))
9152 return ret;
9154 switch (fcode)
9157 CASE_FLT_FN (BUILT_IN_SINCOS):
9158 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9160 CASE_FLT_FN (BUILT_IN_FMA):
9161 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9162 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9164 CASE_FLT_FN (BUILT_IN_REMQUO):
9165 if (validate_arg (arg0, REAL_TYPE)
9166 && validate_arg (arg1, REAL_TYPE)
9167 && validate_arg (arg2, POINTER_TYPE))
9168 return do_mpfr_remquo (arg0, arg1, arg2);
9169 break;
9171 case BUILT_IN_MEMCMP:
9172 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9174 case BUILT_IN_EXPECT:
9175 return fold_builtin_expect (loc, arg0, arg1, arg2);
9177 case BUILT_IN_ADD_OVERFLOW:
9178 case BUILT_IN_SUB_OVERFLOW:
9179 case BUILT_IN_MUL_OVERFLOW:
9180 case BUILT_IN_ADD_OVERFLOW_P:
9181 case BUILT_IN_SUB_OVERFLOW_P:
9182 case BUILT_IN_MUL_OVERFLOW_P:
9183 case BUILT_IN_SADD_OVERFLOW:
9184 case BUILT_IN_SADDL_OVERFLOW:
9185 case BUILT_IN_SADDLL_OVERFLOW:
9186 case BUILT_IN_SSUB_OVERFLOW:
9187 case BUILT_IN_SSUBL_OVERFLOW:
9188 case BUILT_IN_SSUBLL_OVERFLOW:
9189 case BUILT_IN_SMUL_OVERFLOW:
9190 case BUILT_IN_SMULL_OVERFLOW:
9191 case BUILT_IN_SMULLL_OVERFLOW:
9192 case BUILT_IN_UADD_OVERFLOW:
9193 case BUILT_IN_UADDL_OVERFLOW:
9194 case BUILT_IN_UADDLL_OVERFLOW:
9195 case BUILT_IN_USUB_OVERFLOW:
9196 case BUILT_IN_USUBL_OVERFLOW:
9197 case BUILT_IN_USUBLL_OVERFLOW:
9198 case BUILT_IN_UMUL_OVERFLOW:
9199 case BUILT_IN_UMULL_OVERFLOW:
9200 case BUILT_IN_UMULLL_OVERFLOW:
9201 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9203 default:
9204 break;
9206 return NULL_TREE;
9209 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9210 arguments. The trailing boolean flag (historically IGNORE, true if the
9211 result of the call is ignored) is unused here. This function returns
9212 NULL_TREE if no simplification was possible. */
9214 tree
9215 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9217 tree ret = NULL_TREE;
9219 switch (nargs)
9221 case 0:
9222 ret = fold_builtin_0 (loc, fndecl);
9223 break;
9224 case 1:
9225 ret = fold_builtin_1 (loc, fndecl, args[0]);
9226 break;
9227 case 2:
9228 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9229 break;
9230 case 3:
9231 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9232 break;
9233 default:
9234 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9235 break;
9237 if (ret)
9239 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9240 SET_EXPR_LOCATION (ret, loc);
9241 TREE_NO_WARNING (ret) = 1;
9242 return ret;
9244 return NULL_TREE;
9247 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9248 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9249 of arguments in ARGS to be omitted. OLDNARGS is the number of
9250 elements in ARGS. */
9252 static tree
9253 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9254 int skip, tree fndecl, int n, va_list newargs)
9256 int nargs = oldnargs - skip + n;
9257 tree *buffer;
9259 if (n > 0)
9261 int i, j;
9263 buffer = XALLOCAVEC (tree, nargs);
9264 for (i = 0; i < n; i++)
9265 buffer[i] = va_arg (newargs, tree);
9266 for (j = skip; j < oldnargs; j++, i++)
9267 buffer[i] = args[j];
9269 else
9270 buffer = args + skip;
9272 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9275 /* Return true if FNDECL shouldn't be folded right now.
9276 If a built-in function has an inline attribute always_inline
9277 wrapper, defer folding it after always_inline functions have
9278 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9279 might not be performed. */
9281 bool
9282 avoid_folding_inline_builtin (tree fndecl)
9284 return (DECL_DECLARED_INLINE_P (fndecl)
9285 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9286 && cfun
9287 && !cfun->always_inline_functions_inlined
9288 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
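/* The case being deferred looks roughly like glibc's -D_FORTIFY_SOURCE
   wrappers (a sketch modeled on them, not actual library code):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Folding the strcpy call before the wrapper is inlined would bypass
   the _chk checking.  */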
9291 /* A wrapper function for builtin folding that prevents warnings for
9292 "statement without effect" and the like, caused by removing the
9293 call node earlier than the warning is generated. */
9295 tree
9296 fold_call_expr (location_t loc, tree exp, bool ignore)
9298 tree ret = NULL_TREE;
9299 tree fndecl = get_callee_fndecl (exp);
9300 if (fndecl
9301 && TREE_CODE (fndecl) == FUNCTION_DECL
9302 && DECL_BUILT_IN (fndecl)
9303 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9304 yet. Defer folding until we see all the arguments
9305 (after inlining). */
9306 && !CALL_EXPR_VA_ARG_PACK (exp))
9308 int nargs = call_expr_nargs (exp);
9310 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9311 instead last argument is __builtin_va_arg_pack (). Defer folding
9312 even in that case, until arguments are finalized. */
9313 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9315 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9316 if (fndecl2
9317 && TREE_CODE (fndecl2) == FUNCTION_DECL
9318 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9319 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9320 return NULL_TREE;
9323 if (avoid_folding_inline_builtin (fndecl))
9324 return NULL_TREE;
9326 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9327 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9328 CALL_EXPR_ARGP (exp), ignore);
9329 else
9331 tree *args = CALL_EXPR_ARGP (exp);
9332 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9333 if (ret)
9334 return ret;
9337 return NULL_TREE;
9340 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9341 N arguments are passed in the array ARGARRAY. Return a folded
9342 expression or NULL_TREE if no simplification was possible. */
9344 tree
9345 fold_builtin_call_array (location_t loc, tree,
9346 tree fn,
9347 int n,
9348 tree *argarray)
9350 if (TREE_CODE (fn) != ADDR_EXPR)
9351 return NULL_TREE;
9353 tree fndecl = TREE_OPERAND (fn, 0);
9354 if (TREE_CODE (fndecl) == FUNCTION_DECL
9355 && DECL_BUILT_IN (fndecl))
9357 /* If last argument is __builtin_va_arg_pack (), arguments to this
9358 function are not finalized yet. Defer folding until they are. */
9359 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9361 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9362 if (fndecl2
9363 && TREE_CODE (fndecl2) == FUNCTION_DECL
9364 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9365 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9366 return NULL_TREE;
9368 if (avoid_folding_inline_builtin (fndecl))
9369 return NULL_TREE;
9370 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9371 return targetm.fold_builtin (fndecl, n, argarray, false);
9372 else
9373 return fold_builtin_n (loc, fndecl, argarray, n, false);
9376 return NULL_TREE;
9379 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9380 along with N new arguments specified as the "..." parameters. SKIP
9381 is the number of arguments in EXP to be omitted. This function is used
9382 to do varargs-to-varargs transformations. */
9384 static tree
9385 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9387 va_list ap;
9388 tree t;
9390 va_start (ap, n);
9391 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9392 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9393 va_end (ap);
9395 return t;
9398 /* Validate a single argument ARG against a tree code CODE representing
9399 a type. Return true when the argument is valid. */
9401 static bool
9402 validate_arg (const_tree arg, enum tree_code code)
9404 if (!arg)
9405 return false;
9406 else if (code == POINTER_TYPE)
9407 return POINTER_TYPE_P (TREE_TYPE (arg));
9408 else if (code == INTEGER_TYPE)
9409 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9410 return code == TREE_CODE (TREE_TYPE (arg));
9413 /* This function validates the types of a function call argument list
9414 against a specified list of tree_codes. If the last specifier is a 0,
9415 that represents an ellipsis, otherwise the last specifier must be a
9416 VOID_TYPE.
9418 This is the GIMPLE version of validate_arglist. Eventually we want to
9419 completely convert builtins.c to work from GIMPLEs and the tree based
9420 validate_arglist will then be removed. */
9422 bool
9423 validate_gimple_arglist (const gcall *call, ...)
9425 enum tree_code code;
9426 bool res = false;
9427 va_list ap;
9428 const_tree arg;
9429 size_t i;
9431 va_start (ap, call);
9432 i = 0;
9434 do
9436 code = (enum tree_code) va_arg (ap, int);
9437 switch (code)
9439 case 0:
9440 /* This signifies an ellipsis; any further arguments are all ok. */
9441 res = true;
9442 goto end;
9443 case VOID_TYPE:
9444 /* This signifies an endlink, if no arguments remain, return
9445 true, otherwise return false. */
9446 res = (i == gimple_call_num_args (call));
9447 goto end;
9448 default:
9449 /* If no parameters remain or the parameter's code does not
9450 match the specified code, return false. Otherwise continue
9451 checking any remaining arguments. */
9452 arg = gimple_call_arg (call, i++);
9453 if (!validate_arg (arg, code))
9454 goto end;
9455 break;
9458 while (1);
9460 /* We need gotos here since we can only have one VA_CLOSE in a
9461 function. */
9462 end: ;
9463 va_end (ap);
9465 return res;
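/* Typical use, as a sketch: to check that CALL's arguments look like
   (double, double *) one would write

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   VOID_TYPE))
       return false;

   while a trailing 0 instead of VOID_TYPE would accept any further
   arguments.  */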
9468 /* Default target-specific builtin expander that does nothing. */
9470 rtx
9471 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9472 rtx target ATTRIBUTE_UNUSED,
9473 rtx subtarget ATTRIBUTE_UNUSED,
9474 machine_mode mode ATTRIBUTE_UNUSED,
9475 int ignore ATTRIBUTE_UNUSED)
9477 return NULL_RTX;
9480 /* Returns true if EXP represents data that would potentially reside
9481 in a readonly section. */
9483 bool
9484 readonly_data_expr (tree exp)
9486 STRIP_NOPS (exp);
9488 if (TREE_CODE (exp) != ADDR_EXPR)
9489 return false;
9491 exp = get_base_address (TREE_OPERAND (exp, 0));
9492 if (!exp)
9493 return false;
9495 /* Make sure we call decl_readonly_section only for trees it
9496 can handle (since it returns true for everything it doesn't
9497 understand). */
9498 if (TREE_CODE (exp) == STRING_CST
9499 || TREE_CODE (exp) == CONSTRUCTOR
9500 || (VAR_P (exp) && TREE_STATIC (exp)))
9501 return decl_readonly_section (exp, 0);
9502 else
9503 return false;
9506 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9507 to the call, and TYPE is its return type.
9509 Return NULL_TREE if no simplification was possible, otherwise return the
9510 simplified form of the call as a tree.
9512 The simplified form may be a constant or other expression which
9513 computes the same value, but in a more efficient manner (including
9514 calls to other builtin functions).
9516 The call may contain arguments which need to be evaluated, but
9517 which are not useful to determine the result of the call. In
9518 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9519 COMPOUND_EXPR will be an argument which must be evaluated.
9520 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9521 COMPOUND_EXPR in the chain will contain the tree for the simplified
9522 form of the builtin function call. */
9524 static tree
9525 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9527 if (!validate_arg (s1, POINTER_TYPE)
9528 || !validate_arg (s2, POINTER_TYPE))
9529 return NULL_TREE;
9530 else
9532 tree fn;
9533 const char *p1, *p2;
9535 p2 = c_getstr (s2);
9536 if (p2 == NULL)
9537 return NULL_TREE;
9539 p1 = c_getstr (s1);
9540 if (p1 != NULL)
9542 const char *r = strpbrk (p1, p2);
9543 tree tem;
9545 if (r == NULL)
9546 return build_int_cst (TREE_TYPE (s1), 0);
9548 /* Return an offset into the constant string argument. */
9549 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9550 return fold_convert_loc (loc, type, tem);
9553 if (p2[0] == '\0')
9554 /* strpbrk(x, "") == NULL.
9555 Evaluate and ignore s1 in case it had side-effects. */
9556 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9558 if (p2[1] != '\0')
9559 return NULL_TREE; /* Really call strpbrk. */
9561 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9562 if (!fn)
9563 return NULL_TREE;
9565 /* New argument list transforming strpbrk(s1, s2) to
9566 strchr(s1, s2[0]). */
9567 return build_call_expr_loc (loc, fn, 2, s1,
9568 build_int_cst (integer_type_node, p2[0]));
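/* Examples of the cases above, schematically:

     strpbrk ("hello", "lo") -> "hello" + 2
     strpbrk (s, "")         -> (char *) 0, with s still evaluated
     strpbrk (s, "x")        -> strchr (s, 'x')

   anything else is left as a real strpbrk call.  */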
9572 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9573 to the call.
9575 Return NULL_TREE if no simplification was possible, otherwise return the
9576 simplified form of the call as a tree.
9578 The simplified form may be a constant or other expression which
9579 computes the same value, but in a more efficient manner (including
9580 calls to other builtin functions).
9582 The call may contain arguments which need to be evaluated, but
9583 which are not useful to determine the result of the call. In
9584 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9585 COMPOUND_EXPR will be an argument which must be evaluated.
9586 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9587 COMPOUND_EXPR in the chain will contain the tree for the simplified
9588 form of the builtin function call. */
9590 static tree
9591 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9593 if (!validate_arg (s1, POINTER_TYPE)
9594 || !validate_arg (s2, POINTER_TYPE))
9595 return NULL_TREE;
9596 else
9598 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9600 /* If either argument is "", the result is 0. */
9601 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9602 /* Evaluate and ignore both arguments in case either one has
9603 side-effects. */
9604 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9605 s1, s2);
9606 return NULL_TREE;
9610 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9611 to the call.
9613 Return NULL_TREE if no simplification was possible, otherwise return the
9614 simplified form of the call as a tree.
9616 The simplified form may be a constant or other expression which
9617 computes the same value, but in a more efficient manner (including
9618 calls to other builtin functions).
9620 The call may contain arguments which need to be evaluated, but
9621 which are not useful to determine the result of the call. In
9622 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9623 COMPOUND_EXPR will be an argument which must be evaluated.
9624 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9625 COMPOUND_EXPR in the chain will contain the tree for the simplified
9626 form of the builtin function call. */
9628 static tree
9629 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9631 if (!validate_arg (s1, POINTER_TYPE)
9632 || !validate_arg (s2, POINTER_TYPE))
9633 return NULL_TREE;
9634 else
9636 /* If the first argument is "", the result is 0. */
9637 const char *p1 = c_getstr (s1);
9638 if (p1 && *p1 == '\0')
9640 /* Evaluate and ignore argument s2 in case it has
9641 side-effects. */
9642 return omit_one_operand_loc (loc, size_type_node,
9643 size_zero_node, s2);
9646 /* If the second argument is "", return __builtin_strlen(s1). */
9647 const char *p2 = c_getstr (s2);
9648 if (p2 && *p2 == '\0')
9650 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9652 /* If the replacement _DECL isn't initialized, don't do the
9653 transformation. */
9654 if (!fn)
9655 return NULL_TREE;
9657 return build_call_expr_loc (loc, fn, 1, s1);
9659 return NULL_TREE;
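/* The net effect of the two folders above, schematically:

     strspn (s, "")   -> 0
     strspn ("", s)   -> 0
     strcspn ("", s)  -> 0
     strcspn (s, "")  -> strlen (s)

   with skipped arguments still evaluated for their side-effects; other
   cases fall through to the library calls.  */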
9663 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9664 produced, false otherwise. This is done so that we don't output the error
9665 or warning more than once. */
9667 bool
9668 fold_builtin_next_arg (tree exp, bool va_start_p)
9670 tree fntype = TREE_TYPE (current_function_decl);
9671 int nargs = call_expr_nargs (exp);
9672 tree arg;
9673 /* There is a good chance the current input_location points inside the
9674 definition of the va_start macro (perhaps on the token for the
9675 builtin) in a system header, so warnings will not be emitted.
9676 Use the location in real source code. */
9677 source_location current_location =
9678 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9679 NULL);
9681 if (!stdarg_p (fntype))
9683 error ("%<va_start%> used in function with fixed args");
9684 return true;
9687 if (va_start_p)
9689 if (va_start_p && (nargs != 2))
9691 error ("wrong number of arguments to function %<va_start%>");
9692 return true;
9694 arg = CALL_EXPR_ARG (exp, 1);
9696 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9697 when we checked the arguments and if needed issued a warning. */
9698 else
9700 if (nargs == 0)
9702 /* Evidently an out of date version of <stdarg.h>; can't validate
9703 va_start's second argument, but can still work as intended. */
9704 warning_at (current_location,
9705 OPT_Wvarargs,
9706 "%<__builtin_next_arg%> called without an argument");
9707 return true;
9709 else if (nargs > 1)
9711 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9712 return true;
9714 arg = CALL_EXPR_ARG (exp, 0);
9717 if (TREE_CODE (arg) == SSA_NAME)
9718 arg = SSA_NAME_VAR (arg);
9720 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9721 or __builtin_next_arg (0) the first time we see it, after checking
9722 the arguments and if needed issuing a warning. */
9723 if (!integer_zerop (arg))
9725 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9727 /* Strip off all nops for the sake of the comparison. This
9728 is not quite the same as STRIP_NOPS. It does more.
9729 We must also strip off INDIRECT_EXPR for C++ reference
9730 parameters. */
9731 while (CONVERT_EXPR_P (arg)
9732 || TREE_CODE (arg) == INDIRECT_REF)
9733 arg = TREE_OPERAND (arg, 0);
9734 if (arg != last_parm)
9736 /* FIXME: Sometimes with the tree optimizers we can get something
9737 other than the last argument even though the user used the last
9738 argument. We just warn and set the arg to be the last
9739 argument so that we will not get wrong code because of
9740 it. */
9741 warning_at (current_location,
9742 OPT_Wvarargs,
9743 "second parameter of %<va_start%> not last named argument");
9746 /* Undefined by C99 7.15.1.4p4 (va_start):
9747 "If the parameter parmN is declared with the register storage
9748 class, with a function or array type, or with a type that is
9749 not compatible with the type that results after application of
9750 the default argument promotions, the behavior is undefined." */
9752 else if (DECL_REGISTER (arg))
9754 warning_at (current_location,
9755 OPT_Wvarargs,
9756 "undefined behavior when second parameter of "
9757 "%<va_start%> is declared with %<register%> storage");
9760 /* We want to verify the second parameter just once before the tree
9761 optimizers are run and then avoid keeping it in the tree,
9762 as otherwise we could warn even for correct code like:
9763 void foo (int i, ...)
9764 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9765 if (va_start_p)
9766 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9767 else
9768 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9770 return false;
9774 /* Expand a call EXP to __builtin_object_size. */
9776 static rtx
9777 expand_builtin_object_size (tree exp)
9779 tree ost;
9780 int object_size_type;
9781 tree fndecl = get_callee_fndecl (exp);
9783 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9785 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9786 exp, fndecl);
9787 expand_builtin_trap ();
9788 return const0_rtx;
9791 ost = CALL_EXPR_ARG (exp, 1);
9792 STRIP_NOPS (ost);
9794 if (TREE_CODE (ost) != INTEGER_CST
9795 || tree_int_cst_sgn (ost) < 0
9796 || compare_tree_int (ost, 3) > 0)
9798 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9799 exp, fndecl);
9800 expand_builtin_trap ();
9801 return const0_rtx;
9804 object_size_type = tree_to_shwi (ost);
9806 return object_size_type < 2 ? constm1_rtx : const0_rtx;
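/* Any call that reaches expansion unfolded therefore yields its
   documented "unknown" value, e.g. (sketch):

     __builtin_object_size (p, 0) -> (size_t) -1
     __builtin_object_size (p, 2) -> (size_t) 0  */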
9809 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9810 FCODE is the BUILT_IN_* to use.
9811 Return NULL_RTX if we failed; the caller should emit a normal call,
9812 otherwise try to get the result in TARGET, if convenient (and in
9813 mode MODE if that's convenient). */
9815 static rtx
9816 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9817 enum built_in_function fcode)
9819 if (!validate_arglist (exp,
9820 POINTER_TYPE,
9821 fcode == BUILT_IN_MEMSET_CHK
9822 ? INTEGER_TYPE : POINTER_TYPE,
9823 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9824 return NULL_RTX;
9826 tree dest = CALL_EXPR_ARG (exp, 0);
9827 tree src = CALL_EXPR_ARG (exp, 1);
9828 tree len = CALL_EXPR_ARG (exp, 2);
9829 tree size = CALL_EXPR_ARG (exp, 3);
9831 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9832 /*str=*/NULL_TREE, size);
9834 if (!tree_fits_uhwi_p (size))
9835 return NULL_RTX;
9837 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9839 /* Avoid transforming the checking call to an ordinary one when
9840 an overflow has been detected or when the call couldn't be
9841 validated because the size is not constant. */
9842 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9843 return NULL_RTX;
9845 tree fn = NULL_TREE;
9846 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9847 mem{cpy,pcpy,move,set} is available. */
9848 switch (fcode)
9850 case BUILT_IN_MEMCPY_CHK:
9851 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9852 break;
9853 case BUILT_IN_MEMPCPY_CHK:
9854 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9855 break;
9856 case BUILT_IN_MEMMOVE_CHK:
9857 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9858 break;
9859 case BUILT_IN_MEMSET_CHK:
9860 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9861 break;
9862 default:
9863 break;
9866 if (! fn)
9867 return NULL_RTX;
9869 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9870 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9871 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9872 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9874 else if (fcode == BUILT_IN_MEMSET_CHK)
9875 return NULL_RTX;
9876 else
9878 unsigned int dest_align = get_pointer_alignment (dest);
9880 /* If DEST is not a pointer type, call the normal function. */
9881 if (dest_align == 0)
9882 return NULL_RTX;
9884 /* If SRC and DEST are the same (and not volatile), do nothing. */
9885 if (operand_equal_p (src, dest, 0))
9887 tree expr;
9889 if (fcode != BUILT_IN_MEMPCPY_CHK)
9891 /* Evaluate and ignore LEN in case it has side-effects. */
9892 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9893 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9896 expr = fold_build_pointer_plus (dest, len);
9897 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9900 /* __memmove_chk special case. */
9901 if (fcode == BUILT_IN_MEMMOVE_CHK)
9903 unsigned int src_align = get_pointer_alignment (src);
9905 if (src_align == 0)
9906 return NULL_RTX;
9908 /* If src is categorized for a readonly section we can use
9909 normal __memcpy_chk. */
9910 if (readonly_data_expr (src))
9912 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9913 if (!fn)
9914 return NULL_RTX;
9915 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9916 dest, src, len, size);
9917 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9918 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9919 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9922 return NULL_RTX;
9926 /* Emit warning if a buffer overflow is detected at compile time. */
9928 static void
9929 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9931 /* The source string. */
9932 tree srcstr = NULL_TREE;
9933 /* The size of the destination object. */
9934 tree objsize = NULL_TREE;
9935 /* The string that is being concatenated with (as in __strcat_chk)
9936 or null if it isn't. */
9937 tree catstr = NULL_TREE;
9938 /* The maximum length of the source sequence in a bounded operation
9939 (such as __strncat_chk) or null if the operation isn't bounded
9940 (such as __strcat_chk). */
9941 tree maxread = NULL_TREE;
9942 /* The exact size of the access (such as in __strncpy_chk). */
9943 tree size = NULL_TREE;
9945 switch (fcode)
9947 case BUILT_IN_STRCPY_CHK:
9948 case BUILT_IN_STPCPY_CHK:
9949 srcstr = CALL_EXPR_ARG (exp, 1);
9950 objsize = CALL_EXPR_ARG (exp, 2);
9951 break;
9953 case BUILT_IN_STRCAT_CHK:
9954 /* For __strcat_chk the warning will be emitted only if overflowing
9955 by at least strlen (dest) + 1 bytes. */
9956 catstr = CALL_EXPR_ARG (exp, 0);
9957 srcstr = CALL_EXPR_ARG (exp, 1);
9958 objsize = CALL_EXPR_ARG (exp, 2);
9959 break;
9961 case BUILT_IN_STRNCAT_CHK:
9962 catstr = CALL_EXPR_ARG (exp, 0);
9963 srcstr = CALL_EXPR_ARG (exp, 1);
9964 maxread = CALL_EXPR_ARG (exp, 2);
9965 objsize = CALL_EXPR_ARG (exp, 3);
9966 break;
9968 case BUILT_IN_STRNCPY_CHK:
9969 case BUILT_IN_STPNCPY_CHK:
9970 srcstr = CALL_EXPR_ARG (exp, 1);
9971 size = CALL_EXPR_ARG (exp, 2);
9972 objsize = CALL_EXPR_ARG (exp, 3);
9973 break;
9975 case BUILT_IN_SNPRINTF_CHK:
9976 case BUILT_IN_VSNPRINTF_CHK:
9977 maxread = CALL_EXPR_ARG (exp, 1);
9978 objsize = CALL_EXPR_ARG (exp, 3);
9979 break;
9980 default:
9981 gcc_unreachable ();
9984 if (catstr && maxread)
9986 /* Check __strncat_chk. There is no way to determine the length
9987 of the string to which the source string is being appended so
9988 just warn when the length of the source string is not known. */
9989 check_strncat_sizes (exp, objsize);
9990 return;
9993 /* The destination argument is the first one for all built-ins above. */
9994 tree dst = CALL_EXPR_ARG (exp, 0);
9996 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
9999 /* Emit warning if a buffer overflow is detected at compile time
10000 in __sprintf_chk/__vsprintf_chk calls. */
10002 static void
10003 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10005 tree size, len, fmt;
10006 const char *fmt_str;
10007 int nargs = call_expr_nargs (exp);
10009 /* Verify the required arguments in the original call. */
10011 if (nargs < 4)
10012 return;
10013 size = CALL_EXPR_ARG (exp, 2);
10014 fmt = CALL_EXPR_ARG (exp, 3);
10016 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10017 return;
10019 /* Check whether the format is a literal string constant. */
10020 fmt_str = c_getstr (fmt);
10021 if (fmt_str == NULL)
10022 return;
10024 if (!init_target_chars ())
10025 return;
10027 /* If the format doesn't contain % args or %%, we know its size. */
10028 if (strchr (fmt_str, target_percent) == 0)
10029 len = build_int_cstu (size_type_node, strlen (fmt_str));
10030 /* If the format is "%s" and first ... argument is a string literal,
10031 we know it too. */
10032 else if (fcode == BUILT_IN_SPRINTF_CHK
10033 && strcmp (fmt_str, target_percent_s) == 0)
10035 tree arg;
10037 if (nargs < 5)
10038 return;
10039 arg = CALL_EXPR_ARG (exp, 4);
10040 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10041 return;
10043 len = c_strlen (arg, 1);
10044 if (!len || ! tree_fits_uhwi_p (len))
10045 return;
10047 else
10048 return;
10050 /* Add one for the terminating nul. */
10051 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10053 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10054 /*maxread=*/NULL_TREE, len, size);
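/* For instance (a sketch, with char buf[4]):

     __builtin___sprintf_chk (buf, 0, 4, "abcde");
     __builtin___sprintf_chk (buf, 0, 4, "%s", "abcde");

   both warn here, since the output length 5 plus the terminating nul
   is known at compile time and exceeds the object size 4.  */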
10057 /* Emit a warning if free is called with the address of a variable. */
10059 static void
10060 maybe_emit_free_warning (tree exp)
10062 tree arg = CALL_EXPR_ARG (exp, 0);
10064 STRIP_NOPS (arg);
10065 if (TREE_CODE (arg) != ADDR_EXPR)
10066 return;
10068 arg = get_base_address (TREE_OPERAND (arg, 0));
10069 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10070 return;
10072 if (SSA_VAR_P (arg))
10073 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10074 "%Kattempt to free a non-heap object %qD", exp, arg);
10075 else
10076 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10077 "%Kattempt to free a non-heap object", exp);
10080 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10081 if possible. */
10083 static tree
10084 fold_builtin_object_size (tree ptr, tree ost)
10086 unsigned HOST_WIDE_INT bytes;
10087 int object_size_type;
10089 if (!validate_arg (ptr, POINTER_TYPE)
10090 || !validate_arg (ost, INTEGER_TYPE))
10091 return NULL_TREE;
10093 STRIP_NOPS (ost);
10095 if (TREE_CODE (ost) != INTEGER_CST
10096 || tree_int_cst_sgn (ost) < 0
10097 || compare_tree_int (ost, 3) > 0)
10098 return NULL_TREE;
10100 object_size_type = tree_to_shwi (ost);
10102 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10103 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10104 and (size_t) 0 for types 2 and 3. */
10105 if (TREE_SIDE_EFFECTS (ptr))
10106 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10108 if (TREE_CODE (ptr) == ADDR_EXPR)
10110 compute_builtin_object_size (ptr, object_size_type, &bytes);
10111 if (wi::fits_to_tree_p (bytes, size_type_node))
10112 return build_int_cstu (size_type_node, bytes);
10114 else if (TREE_CODE (ptr) == SSA_NAME)
10116 /* If object size is not known yet, delay folding until
10117 later. Maybe subsequent passes will help determining
10118 it. */
10119 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10120 && wi::fits_to_tree_p (bytes, size_type_node))
10121 return build_int_cstu (size_type_node, bytes);
10124 return NULL_TREE;
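/* For example (sketch):

     char buf[64];
     ... __builtin_object_size (&buf, 0) ...   folds to 64

   whereas a pointer whose value is not yet known is left alone so a
   later pass can retry.  */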
10127 /* Builtins with folding operations that operate on "..." arguments
10128 need special handling; we need to store the arguments in a convenient
10129 data structure before attempting any folding. Fortunately there are
10130 only a few builtins that fall into this category. FNDECL is the
10131 function, and ARGS is the array of its NARGS arguments. */
10133 static tree
10134 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10136 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10137 tree ret = NULL_TREE;
10139 switch (fcode)
10141 case BUILT_IN_FPCLASSIFY:
10142 ret = fold_builtin_fpclassify (loc, args, nargs);
10143 break;
10145 default:
10146 break;
10148 if (ret)
10150 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10151 SET_EXPR_LOCATION (ret, loc);
10152 TREE_NO_WARNING (ret) = 1;
10153 return ret;
10155 return NULL_TREE;
10158 /* Initialize format string characters in the target charset. */
10160 bool
10161 init_target_chars (void)
10163 static bool init;
10164 if (!init)
10166 target_newline = lang_hooks.to_target_charset ('\n');
10167 target_percent = lang_hooks.to_target_charset ('%');
10168 target_c = lang_hooks.to_target_charset ('c');
10169 target_s = lang_hooks.to_target_charset ('s');
10170 if (target_newline == 0 || target_percent == 0 || target_c == 0
10171 || target_s == 0)
10172 return false;
10174 target_percent_c[0] = target_percent;
10175 target_percent_c[1] = target_c;
10176 target_percent_c[2] = '\0';
10178 target_percent_s[0] = target_percent;
10179 target_percent_s[1] = target_s;
10180 target_percent_s[2] = '\0';
10182 target_percent_s_newline[0] = target_percent;
10183 target_percent_s_newline[1] = target_s;
10184 target_percent_s_newline[2] = target_newline;
10185 target_percent_s_newline[3] = '\0';
10187 init = true;
10189 return true;
10192 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10193 and no overflow/underflow occurred. INEXACT is true if M was not
10194 exactly calculated. TYPE is the tree type for the result. This
10195 function assumes that the caller cleared the MPFR flags before
10196 calculating M, so that any flag set since then can be detected
10197 here. Return NULL_TREE if any checks fail. */
10199 static tree
10200 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10202 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10203 overflow/underflow occurred. If -frounding-math, proceed iff the
10204 result of calling FUNC was exact. */
10205 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10206 && (!flag_rounding_math || !inexact))
10208 REAL_VALUE_TYPE rr;
10210 real_from_mpfr (&rr, m, type, GMP_RNDN);
10211 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10212 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10213 but the mpfr_t is not, then we underflowed in the
10214 conversion. */
10215 if (real_isfinite (&rr)
10216 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10218 REAL_VALUE_TYPE rmode;
10220 real_convert (&rmode, TYPE_MODE (type), &rr);
10221 /* Proceed iff the specified mode can hold the value. */
10222 if (real_identical (&rmode, &rr))
10223 return build_real (type, rmode);
10226 return NULL_TREE;
10229 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10230 number and no overflow/underflow occurred. INEXACT is true if M
10231 was not exactly calculated. TYPE is the tree type for the result.
10232 This function assumes that the caller cleared the MPFR flags before
10233 calculating M, so that any flag set since then can be detected
10234 here. Return NULL_TREE if any checks fail; if
10235 FORCE_CONVERT is true, bypass the checks. */
10237 static tree
10238 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10240 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10241 overflow/underflow occurred. If -frounding-math, proceed iff the
10242 result of calling FUNC was exact. */
10243 if (force_convert
10244 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10245 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10246 && (!flag_rounding_math || !inexact)))
10248 REAL_VALUE_TYPE re, im;
10250 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10251 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10252 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10253 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10254 but the mpfr_t is not, then we underflowed in the
10255 conversion. */
10256 if (force_convert
10257 || (real_isfinite (&re) && real_isfinite (&im)
10258 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10259 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10261 REAL_VALUE_TYPE re_mode, im_mode;
10263 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10264 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10265 /* Proceed iff the specified mode can hold the value. */
10266 if (force_convert
10267 || (real_identical (&re_mode, &re)
10268 && real_identical (&im_mode, &im)))
10269 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10270 build_real (TREE_TYPE (type), im_mode));
10273 return NULL_TREE;
10276 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10277 the integer pointed to by ARG_QUO and return the remainder. The type is taken
10278 from the type of ARG0 and is used for setting the precision of the
10279 calculation and results. */
10281 static tree
10282 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10284 tree const type = TREE_TYPE (arg0);
10285 tree result = NULL_TREE;
10287 STRIP_NOPS (arg0);
10288 STRIP_NOPS (arg1);
10290 /* To proceed, MPFR must exactly represent the target floating point
10291 format, which only happens when the target base equals two. */
10292 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10293 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10294 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10296 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10297 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10299 if (real_isfinite (ra0) && real_isfinite (ra1))
10301 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10302 const int prec = fmt->p;
10303 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10304 tree result_rem;
10305 long integer_quo;
10306 mpfr_t m0, m1;
10308 mpfr_inits2 (prec, m0, m1, NULL);
10309 mpfr_from_real (m0, ra0, GMP_RNDN);
10310 mpfr_from_real (m1, ra1, GMP_RNDN);
10311 mpfr_clear_flags ();
10312 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10313 /* Remquo is independent of the rounding mode, so pass
10314 inexact=0 to do_mpfr_ckconv(). */
10315 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10316 mpfr_clears (m0, m1, NULL);
10317 if (result_rem)
10319 /* MPFR calculates quo in the host's long so it may
10320 return more bits in quo than the target int can hold
10321 if sizeof(host long) > sizeof(target int). This can
10322 happen even for native compilers in LP64 mode. In
10323 these cases, reduce the quo value modulo the largest
10324 number that the target int can hold while leaving one
10325 bit for the sign. */
10326 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10327 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10329 /* Dereference the quo pointer argument. */
10330 arg_quo = build_fold_indirect_ref (arg_quo);
10331 /* Proceed iff a valid pointer type was passed in. */
10332 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10334 /* Set the value. */
10335 tree result_quo
10336 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10337 build_int_cst (TREE_TYPE (arg_quo),
10338 integer_quo));
10339 TREE_SIDE_EFFECTS (result_quo) = 1;
10340 /* Combine the quo assignment with the rem. */
10341 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10342 result_quo, result_rem));
10347 return result;
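/* Schematically, with constant arguments (a sketch):

     remquo (5.0, 2.0, &q)

   folds into a COMPOUND_EXPR that stores q = 2 and yields the
   remainder 1.0, both computed by MPFR at the precision of the
   argument type.  */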
10350 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10351 resulting value as a tree with type TYPE. The mpfr precision is
10352 set to the precision of TYPE. We assume that this mpfr function
10353 returns zero if the result could be calculated exactly within the
10354 requested precision. In addition, the integer pointer represented
10355 by ARG_SG will be dereferenced and set to the appropriate signgam
10356 (-1,1) value. */
10358 static tree
10359 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10361 tree result = NULL_TREE;
10363 STRIP_NOPS (arg);
10365 /* To proceed, MPFR must exactly represent the target floating point
10366 format, which only happens when the target base equals two. Also
10367 verify ARG is a constant and that ARG_SG is an int pointer. */
10368 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10369 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10370 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10371 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10373 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10375 /* In addition to NaN and Inf, the argument cannot be zero or a
10376 negative integer. */
10377 if (real_isfinite (ra)
10378 && ra->cl != rvc_zero
10379 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10381 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10382 const int prec = fmt->p;
10383 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10384 int inexact, sg;
10385 mpfr_t m;
10386 tree result_lg;
10388 mpfr_init2 (m, prec);
10389 mpfr_from_real (m, ra, GMP_RNDN);
10390 mpfr_clear_flags ();
10391 inexact = mpfr_lgamma (m, &sg, m, rnd);
10392 result_lg = do_mpfr_ckconv (m, type, inexact);
10393 mpfr_clear (m);
10394 if (result_lg)
10396 tree result_sg;
10398 /* Dereference the arg_sg pointer argument. */
10399 arg_sg = build_fold_indirect_ref (arg_sg);
10400 /* Assign the signgam value into *arg_sg. */
10401 result_sg = fold_build2 (MODIFY_EXPR,
10402 TREE_TYPE (arg_sg), arg_sg,
10403 build_int_cst (TREE_TYPE (arg_sg), sg));
10404 TREE_SIDE_EFFECTS (result_sg) = 1;
10405 /* Combine the signgam assignment with the lgamma result. */
10406 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10407 result_sg, result_lg));
10412 return result;
10415 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10416 mpc function FUNC on it and return the resulting value as a tree
10417 with type TYPE. The mpfr precision is set to the precision of
10418 TYPE. We assume that function FUNC returns zero if the result
10419 could be calculated exactly within the requested precision. If
10420 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10421 in the arguments and/or results. */
10423 tree
10424 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10425 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10427 tree result = NULL_TREE;
10429 STRIP_NOPS (arg0);
10430 STRIP_NOPS (arg1);
10432 /* To proceed, MPFR must exactly represent the target floating point
10433 format, which only happens when the target base equals two. */
10434 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10435 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10436 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10437 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10438 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10440 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10441 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10442 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10443 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10445 if (do_nonfinite
10446 || (real_isfinite (re0) && real_isfinite (im0)
10447 && real_isfinite (re1) && real_isfinite (im1)))
10449 const struct real_format *const fmt =
10450 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10451 const int prec = fmt->p;
10452 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10453 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10454 int inexact;
10455 mpc_t m0, m1;
10457 mpc_init2 (m0, prec);
10458 mpc_init2 (m1, prec);
10459 mpfr_from_real (mpc_realref (m0), re0, rnd);
10460 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10461 mpfr_from_real (mpc_realref (m1), re1, rnd);
10462 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10463 mpfr_clear_flags ();
10464 inexact = func (m0, m0, m1, crnd);
10465 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10466 mpc_clear (m0);
10467 mpc_clear (m1);
10471 return result;
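/* This is the workhorse behind folding e.g. cpow on complex constants:
   the caller passes mpc_pow as FUNC, so (sketch)

     cpow (I, 2)  ->  -1.0 + 0.0i

   evaluated by MPC at the precision of the type, subject to the
   exactness checks above.  */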
10474 /* A wrapper function for builtin folding that prevents warnings for
10475 "statement without effect" and the like, caused by removing the
10476 call node earlier than the warning is generated. */
10478 tree
10479 fold_call_stmt (gcall *stmt, bool ignore)
10481 tree ret = NULL_TREE;
10482 tree fndecl = gimple_call_fndecl (stmt);
10483 location_t loc = gimple_location (stmt);
10484 if (fndecl
10485 && TREE_CODE (fndecl) == FUNCTION_DECL
10486 && DECL_BUILT_IN (fndecl)
10487 && !gimple_call_va_arg_pack_p (stmt))
10489 int nargs = gimple_call_num_args (stmt);
10490 tree *args = (nargs > 0
10491 ? gimple_call_arg_ptr (stmt, 0)
10492 : &error_mark_node);
10494 if (avoid_folding_inline_builtin (fndecl))
10495 return NULL_TREE;
10496 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10498 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10500 else
10502 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10503 if (ret)
10505 /* Propagate location information from original call to
10506 expansion of builtin. Otherwise things like
10507 maybe_emit_chk_warning, that operate on the expansion
10508 of a builtin, will use the wrong location information. */
10509 if (gimple_has_location (stmt))
10511 tree realret = ret;
10512 if (TREE_CODE (ret) == NOP_EXPR)
10513 realret = TREE_OPERAND (ret, 0);
10514 if (CAN_HAVE_LOCATION_P (realret)
10515 && !EXPR_HAS_LOCATION (realret))
10516 SET_EXPR_LOCATION (realret, loc);
10517 return realret;
10519 return ret;
10523 return NULL_TREE;
10526 /* Look up the function in builtin_decl that corresponds to DECL
10527 and set ASMSPEC as its user assembler name. DECL must be a
10528 function decl that declares a builtin. */
10530 void
10531 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10533 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10534 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10535 && asmspec != 0);
10537 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10538 set_user_assembler_name (builtin, asmspec);
10540 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10541 && INT_TYPE_SIZE < BITS_PER_WORD)
10543 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10544 set_user_assembler_libfunc ("ffs", asmspec);
10545 set_optab_libfunc (ffs_optab, mode, "ffs");
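/* E.g. (sketch) after the user declares

     extern int ffs (int) __asm__ ("my_ffs");

   libcalls generated through the ffs optab will also reference
   "my_ffs" instead of the default "ffs".  */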
10549 /* Return true if DECL is a builtin that expands to a constant or similarly
10550 simple code. */
10551 bool
10552 is_simple_builtin (tree decl)
10554 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10555 switch (DECL_FUNCTION_CODE (decl))
10557 /* Builtins that expand to constants. */
10558 case BUILT_IN_CONSTANT_P:
10559 case BUILT_IN_EXPECT:
10560 case BUILT_IN_OBJECT_SIZE:
10561 case BUILT_IN_UNREACHABLE:
10562 /* Simple register moves or loads from stack. */
10563 case BUILT_IN_ASSUME_ALIGNED:
10564 case BUILT_IN_RETURN_ADDRESS:
10565 case BUILT_IN_EXTRACT_RETURN_ADDR:
10566 case BUILT_IN_FROB_RETURN_ADDR:
10567 case BUILT_IN_RETURN:
10568 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10569 case BUILT_IN_FRAME_ADDRESS:
10570 case BUILT_IN_VA_END:
10571 case BUILT_IN_STACK_SAVE:
10572 case BUILT_IN_STACK_RESTORE:
10573 /* Exception state returns or moves registers around. */
10574 case BUILT_IN_EH_FILTER:
10575 case BUILT_IN_EH_POINTER:
10576 case BUILT_IN_EH_COPY_VALUES:
10577 return true;
10579 default:
10580 return false;
10583 return false;
10586 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10587 is most probably expanded inline into reasonably simple code. This is a
10588 superset of is_simple_builtin. */
10589 bool
10590 is_inexpensive_builtin (tree decl)
10592 if (!decl)
10593 return false;
10594 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10595 return true;
10596 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10597 switch (DECL_FUNCTION_CODE (decl))
10599 case BUILT_IN_ABS:
10600 CASE_BUILT_IN_ALLOCA:
10601 case BUILT_IN_BSWAP16:
10602 case BUILT_IN_BSWAP32:
10603 case BUILT_IN_BSWAP64:
10604 case BUILT_IN_CLZ:
10605 case BUILT_IN_CLZIMAX:
10606 case BUILT_IN_CLZL:
10607 case BUILT_IN_CLZLL:
10608 case BUILT_IN_CTZ:
10609 case BUILT_IN_CTZIMAX:
10610 case BUILT_IN_CTZL:
10611 case BUILT_IN_CTZLL:
10612 case BUILT_IN_FFS:
10613 case BUILT_IN_FFSIMAX:
10614 case BUILT_IN_FFSL:
10615 case BUILT_IN_FFSLL:
10616 case BUILT_IN_IMAXABS:
10617 case BUILT_IN_FINITE:
10618 case BUILT_IN_FINITEF:
10619 case BUILT_IN_FINITEL:
10620 case BUILT_IN_FINITED32:
10621 case BUILT_IN_FINITED64:
10622 case BUILT_IN_FINITED128:
10623 case BUILT_IN_FPCLASSIFY:
10624 case BUILT_IN_ISFINITE:
10625 case BUILT_IN_ISINF_SIGN:
10626 case BUILT_IN_ISINF:
10627 case BUILT_IN_ISINFF:
10628 case BUILT_IN_ISINFL:
10629 case BUILT_IN_ISINFD32:
10630 case BUILT_IN_ISINFD64:
10631 case BUILT_IN_ISINFD128:
10632 case BUILT_IN_ISNAN:
10633 case BUILT_IN_ISNANF:
10634 case BUILT_IN_ISNANL:
10635 case BUILT_IN_ISNAND32:
10636 case BUILT_IN_ISNAND64:
10637 case BUILT_IN_ISNAND128:
10638 case BUILT_IN_ISNORMAL:
10639 case BUILT_IN_ISGREATER:
10640 case BUILT_IN_ISGREATEREQUAL:
10641 case BUILT_IN_ISLESS:
10642 case BUILT_IN_ISLESSEQUAL:
10643 case BUILT_IN_ISLESSGREATER:
10644 case BUILT_IN_ISUNORDERED:
10645 case BUILT_IN_VA_ARG_PACK:
10646 case BUILT_IN_VA_ARG_PACK_LEN:
10647 case BUILT_IN_VA_COPY:
10648 case BUILT_IN_TRAP:
10649 case BUILT_IN_SAVEREGS:
10650 case BUILT_IN_POPCOUNTL:
10651 case BUILT_IN_POPCOUNTLL:
10652 case BUILT_IN_POPCOUNTIMAX:
10653 case BUILT_IN_POPCOUNT:
10654 case BUILT_IN_PARITYL:
10655 case BUILT_IN_PARITYLL:
10656 case BUILT_IN_PARITYIMAX:
10657 case BUILT_IN_PARITY:
10658 case BUILT_IN_LABS:
10659 case BUILT_IN_LLABS:
10660 case BUILT_IN_PREFETCH:
10661 case BUILT_IN_ACC_ON_DEVICE:
10662 return true;
10664 default:
10665 return is_simple_builtin (decl);
10668 return false;
10671 /* Return true if T is a constant and the value cast to a target char
10672 can be represented by a host char.
10673 If so, store the resulting char constant in *P. */
10675 bool
10676 target_char_cst_p (tree t, char *p)
10678 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10679 return false;
10681 *p = (char)tree_to_uhwi (t);
10682 return true;
10685 /* Return the maximum object size. */
10687 tree
10688 max_object_size (void)
10690 /* To do: Make this a configurable parameter. */
10691 return TYPE_MAX_VALUE (ptrdiff_type_node);