/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
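
/* For illustration: is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false, even though memcpy may
   still be expanded as a builtin when optimizing.  */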

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
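
/* A worked example of the ALIGN/BITPOS encoding used above: for a
   pointer known to be a 16-byte aligned address plus 4,
   get_pointer_alignment_1 stores 128 in *ALIGNP and 32 in *BITPOSP
   (both in bits), so get_pointer_alignment returns
   least_bit_hwi (32) == 32 bits, i.e. 4-byte alignment.  */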

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
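
/* For illustration: string_length ("ab\0cd", 1, 5) stops at the embedded
   nul and returns 2; for a wide string such as L"ab" with ELTSIZE 4 the
   memcmp path likewise returns 2.  */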

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	/* Return when an embedded null character is found.  */
	return NULL_TREE;

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
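
/* For illustration: c_strlen on the STRING_CST "hello" returns
   ssize_int (5); with a constant offset, as in "hello" + 2, it returns
   ssize_int (3); and with a variable offset I it returns the tree
   5 - I, which is safe only because "hello" has no embedded nul.  */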

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
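
/* For illustration (on a typical little-endian target such as x86_64):
   c_readstr ("abcd", SImode) produces the constant 0x64636261, with
   str[0] in the least significant byte; on a big-endian target the
   same call produces 0x61626364.  */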

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
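
/* For illustration, the buffer layout established above is:

     word 0:  frame pointer (targetm.builtin_setjmp_frame_value)
     word 1:  address of RECEIVER_LABEL
     word 2+: stack save area in SA_MODE

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below
   read and update the buffer using the same offsets.  */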

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if more arguments remain in the const call expression
   iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
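
/* For illustration, a hypothetical caller such as

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly a pointer argument followed by an integer argument,
   and in addition rejects a null pointer constant for the pointer
   argument when the callee is declared with attribute nonnull.  */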

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
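
/* For illustration, a source-level call such as

     __builtin_prefetch (&a[i], 0, 3);

   arrives here with ARG1 == 0 (prefetch for read) and ARG2 == 3 (high
   temporal locality), which are also the values the two optional
   arguments default to when omitted.  */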

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
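
/* For illustration, the block sized above is laid out as

     [incoming arg pointer]
     [structure value address, unless passed "invisibly"]
     [each FUNCTION_ARG_REGNO_P register, aligned to its mode]

   and expand_builtin_apply_args_1 below fills it in with the same
   offset computation.  */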

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1737 /* Perform an untyped return. */
1739 static void
1740 expand_builtin_return (rtx result)
1742 int size, align, regno;
1743 fixed_size_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1747 result = convert_memory_address (Pmode, result);
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1752 if (targetm.have_untyped_return ())
1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1756 emit_barrier ();
1757 return;
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1781 /* Return whatever values were restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1788 static enum type_class
1789 type_to_class (tree type)
1791 switch (TREE_CODE (type))
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1814 /* Expand a call EXP to __builtin_classify_type. */
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
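/* Illustrative example (editor's sketch, not part of the GCC sources):
   at the source level the classification above is what
   __builtin_classify_type reports, e.g.

     int i;  double d;  char *p;
     __builtin_classify_type (i);   // integer_type_class
     __builtin_classify_type (d);   // real_type_class
     __builtin_classify_type (p);   // pointer_type_class

   Headers such as glibc's <tgmath.h> have historically dispatched on
   these values.  */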
1824 /* This helper macro, meant to be used in mathfn_built_in below, determines
1825 which among a set of builtin math functions is appropriate for a given type
1826 mode. The 'F' (float) and 'L' (long double) variants are automatically
1827 generated from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1828 types, there are additional types that are considered with 'F32', 'F64',
1829 'F128', etc. suffixes. */
1830 #define CASE_MATHFN(MATHFN) \
1831 CASE_CFN_##MATHFN: \
1832 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1833 fcodel = BUILT_IN_##MATHFN##L ; break;
1834 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1835 types. */
1836 #define CASE_MATHFN_FLOATN(MATHFN) \
1837 CASE_CFN_##MATHFN: \
1838 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1839 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1840 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1841 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1842 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1843 break;
1844 /* Similar to above, but appends _R after any F/L suffix. */
1845 #define CASE_MATHFN_REENT(MATHFN) \
1846 case CFN_BUILT_IN_##MATHFN##_R: \
1847 case CFN_BUILT_IN_##MATHFN##F_R: \
1848 case CFN_BUILT_IN_##MATHFN##L_R: \
1849 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1850 fcodel = BUILT_IN_##MATHFN##L_R ; break;
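/* For reference (editor's sketch): given the definitions above,

     CASE_MATHFN (SQRT)

   expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   and CASE_MATHFN_REENT (LGAMMA) covers CFN_BUILT_IN_LGAMMA_R,
   CFN_BUILT_IN_LGAMMAF_R and CFN_BUILT_IN_LGAMMAL_R, setting the
   corresponding *_R function codes.  */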
1852 /* Return a function equivalent to FN but operating on floating-point
1853 values of type TYPE, or END_BUILTINS if no such function exists.
1854 This is purely an operation on function codes; it does not guarantee
1855 that the target actually has an implementation of the function. */
1857 static built_in_function
1858 mathfn_built_in_2 (tree type, combined_fn fn)
1860 tree mtype;
1861 built_in_function fcode, fcodef, fcodel;
1862 built_in_function fcodef16 = END_BUILTINS;
1863 built_in_function fcodef32 = END_BUILTINS;
1864 built_in_function fcodef64 = END_BUILTINS;
1865 built_in_function fcodef128 = END_BUILTINS;
1866 built_in_function fcodef32x = END_BUILTINS;
1867 built_in_function fcodef64x = END_BUILTINS;
1868 built_in_function fcodef128x = END_BUILTINS;
1870 switch (fn)
1872 CASE_MATHFN (ACOS)
1873 CASE_MATHFN (ACOSH)
1874 CASE_MATHFN (ASIN)
1875 CASE_MATHFN (ASINH)
1876 CASE_MATHFN (ATAN)
1877 CASE_MATHFN (ATAN2)
1878 CASE_MATHFN (ATANH)
1879 CASE_MATHFN (CBRT)
1880 CASE_MATHFN_FLOATN (CEIL)
1881 CASE_MATHFN (CEXPI)
1882 CASE_MATHFN_FLOATN (COPYSIGN)
1883 CASE_MATHFN (COS)
1884 CASE_MATHFN (COSH)
1885 CASE_MATHFN (DREM)
1886 CASE_MATHFN (ERF)
1887 CASE_MATHFN (ERFC)
1888 CASE_MATHFN (EXP)
1889 CASE_MATHFN (EXP10)
1890 CASE_MATHFN (EXP2)
1891 CASE_MATHFN (EXPM1)
1892 CASE_MATHFN (FABS)
1893 CASE_MATHFN (FDIM)
1894 CASE_MATHFN_FLOATN (FLOOR)
1895 CASE_MATHFN_FLOATN (FMA)
1896 CASE_MATHFN_FLOATN (FMAX)
1897 CASE_MATHFN_FLOATN (FMIN)
1898 CASE_MATHFN (FMOD)
1899 CASE_MATHFN (FREXP)
1900 CASE_MATHFN (GAMMA)
1901 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1902 CASE_MATHFN (HUGE_VAL)
1903 CASE_MATHFN (HYPOT)
1904 CASE_MATHFN (ILOGB)
1905 CASE_MATHFN (ICEIL)
1906 CASE_MATHFN (IFLOOR)
1907 CASE_MATHFN (INF)
1908 CASE_MATHFN (IRINT)
1909 CASE_MATHFN (IROUND)
1910 CASE_MATHFN (ISINF)
1911 CASE_MATHFN (J0)
1912 CASE_MATHFN (J1)
1913 CASE_MATHFN (JN)
1914 CASE_MATHFN (LCEIL)
1915 CASE_MATHFN (LDEXP)
1916 CASE_MATHFN (LFLOOR)
1917 CASE_MATHFN (LGAMMA)
1918 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1919 CASE_MATHFN (LLCEIL)
1920 CASE_MATHFN (LLFLOOR)
1921 CASE_MATHFN (LLRINT)
1922 CASE_MATHFN (LLROUND)
1923 CASE_MATHFN (LOG)
1924 CASE_MATHFN (LOG10)
1925 CASE_MATHFN (LOG1P)
1926 CASE_MATHFN (LOG2)
1927 CASE_MATHFN (LOGB)
1928 CASE_MATHFN (LRINT)
1929 CASE_MATHFN (LROUND)
1930 CASE_MATHFN (MODF)
1931 CASE_MATHFN (NAN)
1932 CASE_MATHFN (NANS)
1933 CASE_MATHFN_FLOATN (NEARBYINT)
1934 CASE_MATHFN (NEXTAFTER)
1935 CASE_MATHFN (NEXTTOWARD)
1936 CASE_MATHFN (POW)
1937 CASE_MATHFN (POWI)
1938 CASE_MATHFN (POW10)
1939 CASE_MATHFN (REMAINDER)
1940 CASE_MATHFN (REMQUO)
1941 CASE_MATHFN_FLOATN (RINT)
1942 CASE_MATHFN_FLOATN (ROUND)
1943 CASE_MATHFN (SCALB)
1944 CASE_MATHFN (SCALBLN)
1945 CASE_MATHFN (SCALBN)
1946 CASE_MATHFN (SIGNBIT)
1947 CASE_MATHFN (SIGNIFICAND)
1948 CASE_MATHFN (SIN)
1949 CASE_MATHFN (SINCOS)
1950 CASE_MATHFN (SINH)
1951 CASE_MATHFN_FLOATN (SQRT)
1952 CASE_MATHFN (TAN)
1953 CASE_MATHFN (TANH)
1954 CASE_MATHFN (TGAMMA)
1955 CASE_MATHFN_FLOATN (TRUNC)
1956 CASE_MATHFN (Y0)
1957 CASE_MATHFN (Y1)
1958 CASE_MATHFN (YN)
1960 default:
1961 return END_BUILTINS;
1964 mtype = TYPE_MAIN_VARIANT (type);
1965 if (mtype == double_type_node)
1966 return fcode;
1967 else if (mtype == float_type_node)
1968 return fcodef;
1969 else if (mtype == long_double_type_node)
1970 return fcodel;
1971 else if (mtype == float16_type_node)
1972 return fcodef16;
1973 else if (mtype == float32_type_node)
1974 return fcodef32;
1975 else if (mtype == float64_type_node)
1976 return fcodef64;
1977 else if (mtype == float128_type_node)
1978 return fcodef128;
1979 else if (mtype == float32x_type_node)
1980 return fcodef32x;
1981 else if (mtype == float64x_type_node)
1982 return fcodef64x;
1983 else if (mtype == float128x_type_node)
1984 return fcodef128x;
1985 else
1986 return END_BUILTINS;
1989 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1990 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1991 otherwise use the explicit declaration. If we can't do the conversion,
1992 return null. */
1994 static tree
1995 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1997 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1998 if (fcode2 == END_BUILTINS)
1999 return NULL_TREE;
2001 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2002 return NULL_TREE;
2004 return builtin_decl_explicit (fcode2);
2007 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2009 tree
2010 mathfn_built_in (tree type, combined_fn fn)
2012 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2015 /* Like mathfn_built_in_1, but take a built_in_function and
2016 always use the implicit builtin declarations. */
2018 tree
2019 mathfn_built_in (tree type, enum built_in_function fn)
2021 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
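/* Illustrative use (hypothetical caller, not from this file):

     tree sinf_decl = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   yields the decl for BUILT_IN_SINF when the implicit declaration is
   available, and NULL_TREE otherwise.  */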
2024 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2025 return its code, otherwise return IFN_LAST. Note that this function
2026 only tests whether the function is defined in internal-fn.def, not whether
2027 it is actually available on the target. */
2029 internal_fn
2030 associated_internal_fn (tree fndecl)
2032 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2033 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2038 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2039 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2040 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2041 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2042 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2043 #include "internal-fn.def"
2045 CASE_FLT_FN (BUILT_IN_POW10):
2046 return IFN_EXP10;
2048 CASE_FLT_FN (BUILT_IN_DREM):
2049 return IFN_REMAINDER;
2051 CASE_FLT_FN (BUILT_IN_SCALBN):
2052 CASE_FLT_FN (BUILT_IN_SCALBLN):
2053 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2054 return IFN_LDEXP;
2055 return IFN_LAST;
2057 default:
2058 return IFN_LAST;
2062 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2063 on the current target by a call to an internal function, return the
2064 code of that internal function, otherwise return IFN_LAST. The caller
2065 is responsible for ensuring that any side-effects of the built-in
2066 call are dealt with correctly. E.g. if CALL sets errno, the caller
2067 must decide that the errno result isn't needed or make it available
2068 in some other way. */
2070 internal_fn
2071 replacement_internal_fn (gcall *call)
2073 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2075 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2076 if (ifn != IFN_LAST)
2078 tree_pair types = direct_internal_fn_types (ifn, call);
2079 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2080 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2081 return ifn;
2084 return IFN_LAST;
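/* Sketch of the intended use (hypothetical caller): given a gcall to
   __builtin_sqrt, replacement_internal_fn returns IFN_SQRT when the
   target supports a direct sqrt expansion for the call's types:

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ;  // safe to rewrite the call to use the internal function

   The sqrt example assumes the usual DEF_INTERNAL_FLT_FLOATN_FN entry
   for SQRT in internal-fn.def.  */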
2087 /* Expand a call to the builtin ternary math functions (fma).
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2094 static rtx
2095 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2097 optab builtin_optab;
2098 rtx op0, op1, op2, result;
2099 rtx_insn *insns;
2100 tree fndecl = get_callee_fndecl (exp);
2101 tree arg0, arg1, arg2;
2102 machine_mode mode;
2104 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2105 return NULL_RTX;
2107 arg0 = CALL_EXPR_ARG (exp, 0);
2108 arg1 = CALL_EXPR_ARG (exp, 1);
2109 arg2 = CALL_EXPR_ARG (exp, 2);
2111 switch (DECL_FUNCTION_CODE (fndecl))
2113 CASE_FLT_FN (BUILT_IN_FMA):
2114 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2115 builtin_optab = fma_optab; break;
2116 default:
2117 gcc_unreachable ();
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2123 /* Before working hard, check whether the instruction is available. */
2124 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2125 return NULL_RTX;
2127 result = gen_reg_rtx (mode);
2129 /* Always stabilize the argument list. */
2130 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2131 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2132 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2134 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2135 op1 = expand_normal (arg1);
2136 op2 = expand_normal (arg2);
2138 start_sequence ();
2140 /* Compute into RESULT.
2141 Set RESULT to wherever the result comes back. */
2142 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2143 result, 0);
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and emit a call to the library function
2147 with the stabilized argument list. */
2148 if (result == 0)
2150 end_sequence ();
2151 return expand_call (exp, target, target == const0_rtx);
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2156 end_sequence ();
2157 emit_insn (insns);
2159 return result;
2162 /* Expand a call to the builtin sin and cos math functions.
2163 Return NULL_RTX if a normal call should be emitted rather than expanding the
2164 function in-line. EXP is the expression that is a call to the builtin
2165 function; if convenient, the result should be placed in TARGET.
2166 SUBTARGET may be used as the target for computing one of EXP's
2167 operands. */
2169 static rtx
2170 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2172 optab builtin_optab;
2173 rtx op0;
2174 rtx_insn *insns;
2175 tree fndecl = get_callee_fndecl (exp);
2176 machine_mode mode;
2177 tree arg;
2179 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2180 return NULL_RTX;
2182 arg = CALL_EXPR_ARG (exp, 0);
2184 switch (DECL_FUNCTION_CODE (fndecl))
2186 CASE_FLT_FN (BUILT_IN_SIN):
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 builtin_optab = sincos_optab; break;
2189 default:
2190 gcc_unreachable ();
2193 /* Make a suitable register to place result in. */
2194 mode = TYPE_MODE (TREE_TYPE (exp));
2196 /* Check if the sincos insn is available; otherwise fall back
2197 to the sin or cos insn. */
2198 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_SIN):
2202 builtin_optab = sin_optab; break;
2203 CASE_FLT_FN (BUILT_IN_COS):
2204 builtin_optab = cos_optab; break;
2205 default:
2206 gcc_unreachable ();
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2212 rtx result = gen_reg_rtx (mode);
2214 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2215 need to expand the argument again. This way, we will not perform
2216 side-effects more than once. */
2217 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2219 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2221 start_sequence ();
2223 /* Compute into RESULT.
2224 Set RESULT to wherever the result comes back. */
2225 if (builtin_optab == sincos_optab)
2227 int ok;
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_SIN):
2232 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2233 break;
2234 CASE_FLT_FN (BUILT_IN_COS):
2235 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2236 break;
2237 default:
2238 gcc_unreachable ();
2240 gcc_assert (ok);
2242 else
2243 result = expand_unop (mode, builtin_optab, op0, result, 0);
2245 if (result != 0)
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2251 return result;
2254 /* If we were unable to expand via the builtin, stop the sequence
2255 (without outputting the insns) and emit a call to the library function
2256 with the stabilized argument list. */
2257 end_sequence ();
2260 return expand_call (exp, target, target == const0_rtx);
2263 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2264 return an RTL instruction code that implements the functionality.
2265 If that isn't possible or available return CODE_FOR_nothing. */
2267 static enum insn_code
2268 interclass_mathfn_icode (tree arg, tree fndecl)
2270 bool errno_set = false;
2271 optab builtin_optab = unknown_optab;
2272 machine_mode mode;
2274 switch (DECL_FUNCTION_CODE (fndecl))
2276 CASE_FLT_FN (BUILT_IN_ILOGB):
2277 errno_set = true; builtin_optab = ilogb_optab; break;
2278 CASE_FLT_FN (BUILT_IN_ISINF):
2279 builtin_optab = isinf_optab; break;
2280 case BUILT_IN_ISNORMAL:
2281 case BUILT_IN_ISFINITE:
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 case BUILT_IN_FINITED32:
2284 case BUILT_IN_FINITED64:
2285 case BUILT_IN_FINITED128:
2286 case BUILT_IN_ISINFD32:
2287 case BUILT_IN_ISINFD64:
2288 case BUILT_IN_ISINFD128:
2289 /* These builtins have no optabs (yet). */
2290 break;
2291 default:
2292 gcc_unreachable ();
2295 /* There's no easy way to detect the case we need to set EDOM. */
2296 if (flag_errno_math && errno_set)
2297 return CODE_FOR_nothing;
2299 /* Optab mode depends on the mode of the input argument. */
2300 mode = TYPE_MODE (TREE_TYPE (arg));
2302 if (builtin_optab)
2303 return optab_handler (builtin_optab, mode);
2304 return CODE_FOR_nothing;
2307 /* Expand a call to one of the builtin math functions that operate on
2308 a floating point argument and output an integer result (ilogb, isinf,
2309 isnan, etc.).
2310 Return 0 if a normal call should be emitted rather than expanding the
2311 function in-line. EXP is the expression that is a call to the builtin
2312 function; if convenient, the result should be placed in TARGET. */
2314 static rtx
2315 expand_builtin_interclass_mathfn (tree exp, rtx target)
2317 enum insn_code icode = CODE_FOR_nothing;
2318 rtx op0;
2319 tree fndecl = get_callee_fndecl (exp);
2320 machine_mode mode;
2321 tree arg;
2323 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2326 arg = CALL_EXPR_ARG (exp, 0);
2327 icode = interclass_mathfn_icode (arg, fndecl);
2328 mode = TYPE_MODE (TREE_TYPE (arg));
2330 if (icode != CODE_FOR_nothing)
2332 struct expand_operand ops[1];
2333 rtx_insn *last = get_last_insn ();
2334 tree orig_arg = arg;
2336 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2337 need to expand the argument again. This way, we will not perform
2338 side-effects more than once. */
2339 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2341 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2343 if (mode != GET_MODE (op0))
2344 op0 = convert_to_mode (mode, op0, 0);
2346 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2347 if (maybe_legitimize_operands (icode, 0, 1, ops)
2348 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2349 return ops[0].value;
2351 delete_insns_since (last);
2352 CALL_EXPR_ARG (exp, 0) = orig_arg;
2355 return NULL_RTX;
2358 /* Expand a call to the builtin sincos math function.
2359 Return NULL_RTX if a normal call should be emitted rather than expanding the
2360 function in-line. EXP is the expression that is a call to the builtin
2361 function. */
2363 static rtx
2364 expand_builtin_sincos (tree exp)
2366 rtx op0, op1, op2, target1, target2;
2367 machine_mode mode;
2368 tree arg, sinp, cosp;
2369 int result;
2370 location_t loc = EXPR_LOCATION (exp);
2371 tree alias_type, alias_off;
2373 if (!validate_arglist (exp, REAL_TYPE,
2374 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2375 return NULL_RTX;
2377 arg = CALL_EXPR_ARG (exp, 0);
2378 sinp = CALL_EXPR_ARG (exp, 1);
2379 cosp = CALL_EXPR_ARG (exp, 2);
2381 /* Make a suitable register to place result in. */
2382 mode = TYPE_MODE (TREE_TYPE (arg));
2384 /* Check if sincos insn is available, otherwise emit the call. */
2385 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2386 return NULL_RTX;
2388 target1 = gen_reg_rtx (mode);
2389 target2 = gen_reg_rtx (mode);
2391 op0 = expand_normal (arg);
2392 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2393 alias_off = build_int_cst (alias_type, 0);
2394 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2395 sinp, alias_off));
2396 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2397 cosp, alias_off));
2399 /* Compute into target1 and target2.
2400 Set TARGET to wherever the result comes back. */
2401 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2402 gcc_assert (result);
2404 /* Move target1 and target2 to the memory locations indicated
2405 by op1 and op2. */
2406 emit_move_insn (op1, target1);
2407 emit_move_insn (op2, target2);
2409 return const0_rtx;
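/* At the source level (illustrative only), the expansion above turns

     double s, c;
     sincos (x, &s, &c);

   into a single sincos<mode> instruction computing both values, with
   the results stored through the two pointer arguments.  */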
2412 /* Expand a call to the internal cexpi builtin to the sincos math function.
2413 EXP is the expression that is a call to the builtin function; if convenient,
2414 the result should be placed in TARGET. */
2416 static rtx
2417 expand_builtin_cexpi (tree exp, rtx target)
2419 tree fndecl = get_callee_fndecl (exp);
2420 tree arg, type;
2421 machine_mode mode;
2422 rtx op0, op1, op2;
2423 location_t loc = EXPR_LOCATION (exp);
2425 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2426 return NULL_RTX;
2428 arg = CALL_EXPR_ARG (exp, 0);
2429 type = TREE_TYPE (arg);
2430 mode = TYPE_MODE (TREE_TYPE (arg));
2432 /* Try expanding via a sincos optab, fall back to emitting a libcall
2433 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2434 is only generated from sincos or cexp, or when either of them is available. */
2435 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2437 op1 = gen_reg_rtx (mode);
2438 op2 = gen_reg_rtx (mode);
2440 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2442 /* Compute into op1 and op2. */
2443 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2445 else if (targetm.libc_has_function (function_sincos))
2447 tree call, fn = NULL_TREE;
2448 tree top1, top2;
2449 rtx op1a, op2a;
2451 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2452 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2456 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2457 else
2458 gcc_unreachable ();
2460 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2461 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2462 op1a = copy_addr_to_reg (XEXP (op1, 0));
2463 op2a = copy_addr_to_reg (XEXP (op2, 0));
2464 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2465 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2467 /* Make sure not to fold the sincos call again. */
2468 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2469 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2470 call, 3, arg, top1, top2));
2472 else
2474 tree call, fn = NULL_TREE, narg;
2475 tree ctype = build_complex_type (type);
2477 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2482 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2483 else
2484 gcc_unreachable ();
2486 /* If we don't have a decl for cexp, create one. This is the
2487 friendliest fallback if the user calls __builtin_cexpi
2488 on a target without full C99 function support. */
2489 if (fn == NULL_TREE)
2491 tree fntype;
2492 const char *name = NULL;
2494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2495 name = "cexpf";
2496 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2497 name = "cexp";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2499 name = "cexpl";
2501 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2502 fn = build_fn_decl (name, fntype);
2505 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2506 build_real (type, dconst0), arg);
2508 /* Make sure not to fold the cexp call again. */
2509 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2510 return expand_expr (build_call_nary (ctype, call, 1, narg),
2511 target, VOIDmode, EXPAND_NORMAL);
2514 /* Now build the proper return type. */
2515 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2516 make_tree (TREE_TYPE (arg), op2),
2517 make_tree (TREE_TYPE (arg), op1)),
2518 target, VOIDmode, EXPAND_NORMAL);
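/* The underlying identity: cexpi (x) == cexp (I*x) == cos (x) + I*sin (x),
   which is why a sincos instruction, the sincos libcall, and cexp are
   all acceptable ways to expand this builtin.  */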
2521 /* Conveniently construct a function call expression. FNDECL names the
2522 function to be called, N is the number of arguments, and the "..."
2523 parameters are the argument expressions. Unlike build_call_expr,
2524 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2526 static tree
2527 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2529 va_list ap;
2530 tree fntype = TREE_TYPE (fndecl);
2531 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2533 va_start (ap, n);
2534 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2535 va_end (ap);
2536 SET_EXPR_LOCATION (fn, loc);
2537 return fn;
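/* Illustrative use (hypothetical): building an unfolded one-argument
   call to a fallback decl FN at location LOC:

     tree call = build_call_nofold_loc (loc, fn, 1, arg);

   The result is always a CALL_EXPR, so callers can expand it directly
   without folding replacing it behind their backs.  */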
2540 /* Expand a call to one of the builtin rounding functions gcc defines
2541 as an extension (lfloor and lceil). As these are gcc extensions we
2542 do not need to worry about setting errno to EDOM.
2543 If expanding via optab fails, lower the expression to (int)(floor(x)).
2544 EXP is the expression that is a call to the builtin function;
2545 if convenient, the result should be placed in TARGET. */
2547 static rtx
2548 expand_builtin_int_roundingfn (tree exp, rtx target)
2550 convert_optab builtin_optab;
2551 rtx op0, tmp;
2552 rtx_insn *insns;
2553 tree fndecl = get_callee_fndecl (exp);
2554 enum built_in_function fallback_fn;
2555 tree fallback_fndecl;
2556 machine_mode mode;
2557 tree arg;
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 gcc_unreachable ();
2562 arg = CALL_EXPR_ARG (exp, 0);
2564 switch (DECL_FUNCTION_CODE (fndecl))
2566 CASE_FLT_FN (BUILT_IN_ICEIL):
2567 CASE_FLT_FN (BUILT_IN_LCEIL):
2568 CASE_FLT_FN (BUILT_IN_LLCEIL):
2569 builtin_optab = lceil_optab;
2570 fallback_fn = BUILT_IN_CEIL;
2571 break;
2573 CASE_FLT_FN (BUILT_IN_IFLOOR):
2574 CASE_FLT_FN (BUILT_IN_LFLOOR):
2575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2576 builtin_optab = lfloor_optab;
2577 fallback_fn = BUILT_IN_FLOOR;
2578 break;
2580 default:
2581 gcc_unreachable ();
2584 /* Make a suitable register to place result in. */
2585 mode = TYPE_MODE (TREE_TYPE (exp));
2587 target = gen_reg_rtx (mode);
2589 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2590 need to expand the argument again. This way, we will not perform
2591 side-effects more than once. */
2592 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2594 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2596 start_sequence ();
2598 /* Compute into TARGET. */
2599 if (expand_sfix_optab (target, op0, builtin_optab))
2601 /* Output the entire sequence. */
2602 insns = get_insns ();
2603 end_sequence ();
2604 emit_insn (insns);
2605 return target;
2608 /* If we were unable to expand via the builtin, stop the sequence
2609 (without outputting the insns). */
2610 end_sequence ();
2612 /* Fall back to floating point rounding optab. */
2613 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2615 /* For non-C99 targets we may end up without a fallback fndecl here
2616 if the user called __builtin_lfloor directly. In this case emit
2617 a call to the floor/ceil variants nevertheless. This should result
2618 in the best user experience for targets without full C99 support. */
2619 if (fallback_fndecl == NULL_TREE)
2621 tree fntype;
2622 const char *name = NULL;
2624 switch (DECL_FUNCTION_CODE (fndecl))
2626 case BUILT_IN_ICEIL:
2627 case BUILT_IN_LCEIL:
2628 case BUILT_IN_LLCEIL:
2629 name = "ceil";
2630 break;
2631 case BUILT_IN_ICEILF:
2632 case BUILT_IN_LCEILF:
2633 case BUILT_IN_LLCEILF:
2634 name = "ceilf";
2635 break;
2636 case BUILT_IN_ICEILL:
2637 case BUILT_IN_LCEILL:
2638 case BUILT_IN_LLCEILL:
2639 name = "ceill";
2640 break;
2641 case BUILT_IN_IFLOOR:
2642 case BUILT_IN_LFLOOR:
2643 case BUILT_IN_LLFLOOR:
2644 name = "floor";
2645 break;
2646 case BUILT_IN_IFLOORF:
2647 case BUILT_IN_LFLOORF:
2648 case BUILT_IN_LLFLOORF:
2649 name = "floorf";
2650 break;
2651 case BUILT_IN_IFLOORL:
2652 case BUILT_IN_LFLOORL:
2653 case BUILT_IN_LLFLOORL:
2654 name = "floorl";
2655 break;
2656 default:
2657 gcc_unreachable ();
2660 fntype = build_function_type_list (TREE_TYPE (arg),
2661 TREE_TYPE (arg), NULL_TREE);
2662 fallback_fndecl = build_fn_decl (name, fntype);
2665 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2667 tmp = expand_normal (exp);
2668 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2670 /* Truncate the result of the floating point optab to integer
2671 via expand_fix (). */
2672 target = gen_reg_rtx (mode);
2673 expand_fix (target, tmp, 0);
2675 return target;
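/* For example (editor's sketch), on a target without an lfloor<mode>
   pattern a call

     long l = __builtin_lfloor (x);

   is lowered by the code above to roughly

     long l = (long) floor (x);

   where the final conversion is performed by expand_fix.  */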
2678 /* Expand a call to one of the builtin math functions doing integer
2679 conversion (lrint).
2680 Return 0 if a normal call should be emitted rather than expanding the
2681 function in-line. EXP is the expression that is a call to the builtin
2682 function; if convenient, the result should be placed in TARGET. */
2684 static rtx
2685 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2687 convert_optab builtin_optab;
2688 rtx op0;
2689 rtx_insn *insns;
2690 tree fndecl = get_callee_fndecl (exp);
2691 tree arg;
2692 machine_mode mode;
2693 enum built_in_function fallback_fn = BUILT_IN_NONE;
2695 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2696 gcc_unreachable ();
2698 arg = CALL_EXPR_ARG (exp, 0);
2700 switch (DECL_FUNCTION_CODE (fndecl))
2702 CASE_FLT_FN (BUILT_IN_IRINT):
2703 fallback_fn = BUILT_IN_LRINT;
2704 gcc_fallthrough ();
2705 CASE_FLT_FN (BUILT_IN_LRINT):
2706 CASE_FLT_FN (BUILT_IN_LLRINT):
2707 builtin_optab = lrint_optab;
2708 break;
2710 CASE_FLT_FN (BUILT_IN_IROUND):
2711 fallback_fn = BUILT_IN_LROUND;
2712 gcc_fallthrough ();
2713 CASE_FLT_FN (BUILT_IN_LROUND):
2714 CASE_FLT_FN (BUILT_IN_LLROUND):
2715 builtin_optab = lround_optab;
2716 break;
2718 default:
2719 gcc_unreachable ();
2722 /* There's no easy way to detect the case we need to set EDOM. */
2723 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2724 return NULL_RTX;
2726 /* Make a suitable register to place result in. */
2727 mode = TYPE_MODE (TREE_TYPE (exp));
2729 /* Expand inline only when errno need not be set; there's no easy way to detect the cases that must set EDOM. */
2730 if (!flag_errno_math)
2732 rtx result = gen_reg_rtx (mode);
2734 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2735 need to expand the argument again. This way, we will not perform
2736 side-effects more than once. */
2737 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2739 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2741 start_sequence ();
2743 if (expand_sfix_optab (result, op0, builtin_optab))
2745 /* Output the entire sequence. */
2746 insns = get_insns ();
2747 end_sequence ();
2748 emit_insn (insns);
2749 return result;
2752 /* If we were unable to expand via the builtin, stop the sequence
2753 (without outputting the insns) and emit a call to the library function
2754 with the stabilized argument list. */
2755 end_sequence ();
2758 if (fallback_fn != BUILT_IN_NONE)
2760 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2761 targets, (int) round (x) should never be transformed into
2762 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2763 a call to lround in the hope that the target provides at least some
2764 C99 functions. This should result in the best user experience for
2765 targets without full C99 support. */
2766 tree fallback_fndecl = mathfn_built_in_1
2767 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2769 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2770 fallback_fndecl, 1, arg);
2772 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2773 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2774 return convert_to_mode (mode, target, 0);
2777 return expand_call (exp, target, target == const0_rtx);
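/* E.g. (illustrative) __builtin_iround (x) on a target lacking an
   lround<mode> pattern becomes a library call to lround followed by a
   narrowing conversion to int, per the fallback logic above.  */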
2780 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2781 a normal call should be emitted rather than expanding the function
2782 in-line. EXP is the expression that is a call to the builtin
2783 function; if convenient, the result should be placed in TARGET. */
2785 static rtx
2786 expand_builtin_powi (tree exp, rtx target)
2788 tree arg0, arg1;
2789 rtx op0, op1;
2790 machine_mode mode;
2791 machine_mode mode2;
2793 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2794 return NULL_RTX;
2796 arg0 = CALL_EXPR_ARG (exp, 0);
2797 arg1 = CALL_EXPR_ARG (exp, 1);
2798 mode = TYPE_MODE (TREE_TYPE (exp));
2800 /* Emit a libcall to libgcc. */
2802 /* Mode of the 2nd argument must match that of an int. */
2803 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2805 if (target == NULL_RTX)
2806 target = gen_reg_rtx (mode);
2808 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2809 if (GET_MODE (op0) != mode)
2810 op0 = convert_to_mode (mode, op0, 0);
2811 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2812 if (GET_MODE (op1) != mode2)
2813 op1 = convert_to_mode (mode2, op1, 0);
2815 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2816 target, LCT_CONST, mode,
2817 op0, mode, op1, mode2);
2819 return target;
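/* The emitted libcall computes x**n for integer n; for DFmode this is
   typically libgcc's __powidf2 (illustrative - the actual symbol comes
   from optab_libfunc (powi_optab, mode)).  */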
2822 /* Expand expression EXP which is a call to the strlen builtin. Return
2823 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2824 try to get the result in TARGET, if convenient. */
2826 static rtx
2827 expand_builtin_strlen (tree exp, rtx target,
2828 machine_mode target_mode)
2830 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2831 return NULL_RTX;
2833 struct expand_operand ops[4];
2834 rtx pat;
2835 tree len;
2836 tree src = CALL_EXPR_ARG (exp, 0);
2837 rtx src_reg;
2838 rtx_insn *before_strlen;
2839 machine_mode insn_mode;
2840 enum insn_code icode = CODE_FOR_nothing;
2841 unsigned int align;
2843 /* If the length can be computed at compile-time, return it. */
2844 len = c_strlen (src, 0);
2845 if (len)
2846 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2848 /* If the length can be computed at compile-time and is a constant
2849 integer, but there are side-effects in src, evaluate
2850 src for side-effects, then return len.
2851 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2852 can be optimized into: i++; x = 3; */
2853 len = c_strlen (src, 1);
2854 if (len && TREE_CODE (len) == INTEGER_CST)
2856 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2857 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2860 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2862 /* If SRC is not a pointer type, don't do this operation inline. */
2863 if (align == 0)
2864 return NULL_RTX;
2866 /* Bail out if we can't compute strlen in the right mode. */
2867 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2869 icode = optab_handler (strlen_optab, insn_mode);
2870 if (icode != CODE_FOR_nothing)
2871 break;
2873 if (insn_mode == VOIDmode)
2874 return NULL_RTX;
2876 /* Make a place to hold the source address. We will not expand
2877 the actual source until we are sure that the expansion will
2878 not fail -- there are trees that cannot be expanded twice. */
2879 src_reg = gen_reg_rtx (Pmode);
2881 /* Mark the beginning of the strlen sequence so we can emit the
2882 source operand later. */
2883 before_strlen = get_last_insn ();
2885 create_output_operand (&ops[0], target, insn_mode);
2886 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2887 create_integer_operand (&ops[2], 0);
2888 create_integer_operand (&ops[3], align);
2889 if (!maybe_expand_insn (icode, 4, ops))
2890 return NULL_RTX;
2892 /* Check to see if the argument was declared attribute nonstring
2893 and if so, issue a warning since at this point it's not known
2894 to be nul-terminated. */
2895 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2897 /* Now that we are assured of success, expand the source. */
2898 start_sequence ();
2899 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2900 if (pat != src_reg)
2902 #ifdef POINTERS_EXTEND_UNSIGNED
2903 if (GET_MODE (pat) != Pmode)
2904 pat = convert_to_mode (Pmode, pat,
2905 POINTERS_EXTEND_UNSIGNED);
2906 #endif
2907 emit_move_insn (src_reg, pat);
2909 pat = get_insns ();
2910 end_sequence ();
2912 if (before_strlen)
2913 emit_insn_after (pat, before_strlen);
2914 else
2915 emit_insn_before (pat, get_insns ());
2917 /* Return the value in the proper mode for this function. */
2918 if (GET_MODE (ops[0].value) == target_mode)
2919 target = ops[0].value;
2920 else if (target != 0)
2921 convert_move (target, ops[0].value, 0);
2922 else
2923 target = convert_to_mode (target_mode, ops[0].value, 0);
2925 return target;
2928 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2929 bytes from constant string DATA + OFFSET and return it as target
2930 constant. */
2932 static rtx
2933 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2934 scalar_int_mode mode)
2936 const char *str = (const char *) data;
2938 gcc_assert (offset >= 0
2939 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2940 <= strlen (str) + 1));
2942 return c_readstr (str + offset, mode);
2945 /* LEN specifies the length of the block for a memcpy/memset operation.
2946 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2947 In some cases we can make a very likely guess about the maximum size;
2948 we then record it in PROBABLE_MAX_SIZE. */
2950 static void
2951 determine_block_size (tree len, rtx len_rtx,
2952 unsigned HOST_WIDE_INT *min_size,
2953 unsigned HOST_WIDE_INT *max_size,
2954 unsigned HOST_WIDE_INT *probable_max_size)
2956 if (CONST_INT_P (len_rtx))
2958 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2959 return;
2961 else
2963 wide_int min, max;
2964 enum value_range_type range_type = VR_UNDEFINED;
2966 /* Determine bounds from the type. */
2967 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2968 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2969 else
2970 *min_size = 0;
2971 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2972 *probable_max_size = *max_size
2973 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2974 else
2975 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2977 if (TREE_CODE (len) == SSA_NAME)
2978 range_type = get_range_info (len, &min, &max);
2979 if (range_type == VR_RANGE)
2981 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2982 *min_size = min.to_uhwi ();
2983 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2984 *probable_max_size = *max_size = max.to_uhwi ();
2986 else if (range_type == VR_ANTI_RANGE)
2989 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2989 if (min == 0)
2991 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2992 *min_size = max.to_uhwi () + 1;
2994 /* Code like
2996 int n;
2997 if (n < 100)
2998 memcpy (a, b, n)
3000 produces an anti-range allowing negative values of N. We can
3001 still use the information to guess that N is not negative. */
3003 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3004 *probable_max_size = min.to_uhwi () - 1;
3007 gcc_checking_assert (*max_size <=
3008 (unsigned HOST_WIDE_INT)
3009 GET_MODE_MASK (GET_MODE (len_rtx)));
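/* Example (editor's sketch): for

     if (n >= 8 && n <= 32)
       memcpy (d, s, n);

   value range propagation records the range [8, 32] for N, so the
   expansion sees MIN_SIZE == 8 and MAX_SIZE == PROBABLE_MAX_SIZE == 32
   and can choose a block-move strategy accordingly.  */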
3012 /* Try to verify that the sizes and lengths of the arguments to a string
3013 manipulation function given by EXP are within valid bounds and that
3014 the operation does not lead to buffer overflow or read past the end.
3015 Arguments other than EXP may be null. When non-null, the arguments
3016 have the following meaning:
3017 DST is the destination of a copy call or NULL otherwise.
3018 SRC is the source of a copy call or NULL otherwise.
3019 DSTWRITE is the number of bytes written into the destination obtained
3020 from the user-supplied size argument to the function (such as in
3021 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3022 MAXREAD is the user-supplied bound on the length of the source sequence
3023 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3024 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3025 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3026 expression EXP is a string function call (as opposed to a memory call
3027 like memcpy). As an exception, SRCSTR can also be an integer denoting
3028 the precomputed size of the source string or object (for functions like
3029 memcpy).
3030 DSTSIZE is the size of the destination object specified by the last
3031 argument to the _chk builtins, typically resulting from the expansion
3032 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3033 DSTSIZE).
3035 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3036 SIZE_MAX.
3038 If the call is successfully verified as safe return true, otherwise
3039 return false. */
3041 static bool
3042 check_access (tree exp, tree, tree, tree dstwrite,
3043 tree maxread, tree srcstr, tree dstsize)
3045 int opt = OPT_Wstringop_overflow_;
3047 /* The size of the largest object is half the address space, or
3048 PTRDIFF_MAX. (This is way too permissive.) */
3049 tree maxobjsize = max_object_size ();
3051 /* Either the length of the source string for string functions or
3052 the size of the source object for raw memory functions. */
3053 tree slen = NULL_TREE;
3055 tree range[2] = { NULL_TREE, NULL_TREE };
3057 /* Set to true when the exact number of bytes written by a string
3058 function like strcpy is not known and the only thing that is
3059 known is that it must be at least one (for the terminating nul). */
3060 bool at_least_one = false;
3061 if (srcstr)
3063 /* SRCSTR is normally a pointer to a string but as a special case
3064 it can be an integer denoting the length of a string. */
3065 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3067 /* Try to determine the range of lengths the source string
3068 refers to. If it can be determined and is less than
3069 the upper bound given by MAXREAD add one to it for
3070 the terminating nul. Otherwise, set it to one for
3071 the same reason, or to MAXREAD as appropriate. */
3072 get_range_strlen (srcstr, range);
3073 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3075 if (maxread && tree_int_cst_le (maxread, range[0]))
3076 range[0] = range[1] = maxread;
3077 else
3078 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3079 range[0], size_one_node);
3081 if (maxread && tree_int_cst_le (maxread, range[1]))
3082 range[1] = maxread;
3083 else if (!integer_all_onesp (range[1]))
3084 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3085 range[1], size_one_node);
3087 slen = range[0];
3089 else
3091 at_least_one = true;
3092 slen = size_one_node;
3095 else
3096 slen = srcstr;
3099 if (!dstwrite && !maxread)
3101 /* When the only available piece of data is the object size
3102 there is nothing to do. */
3103 if (!slen)
3104 return true;
3106 /* Otherwise, when the length of the source sequence is known
3107 (as with strlen), set DSTWRITE to it. */
3108 if (!range[0])
3109 dstwrite = slen;
3112 if (!dstsize)
3113 dstsize = maxobjsize;
3115 if (dstwrite)
3116 get_size_range (dstwrite, range);
3118 tree func = get_callee_fndecl (exp);
3120 /* First check the number of bytes to be written against the maximum
3121 object size. */
3122 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3124 location_t loc = tree_nonartificial_location (exp);
3125 loc = expansion_point_location_if_in_system_header (loc);
3127 if (range[0] == range[1])
3128 warning_at (loc, opt,
3129 "%K%qD specified size %E "
3130 "exceeds maximum object size %E",
3131 exp, func, range[0], maxobjsize);
3132 else
3133 warning_at (loc, opt,
3134 "%K%qD specified size between %E and %E "
3135 "exceeds maximum object size %E",
3136 exp, func,
3137 range[0], range[1], maxobjsize);
3138 return false;
3141 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3142 constant, and in range of unsigned HOST_WIDE_INT. */
3143 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3145 /* Next check the number of bytes to be written against the destination
3146 object size. */
3147 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3149 if (range[0]
3150 && ((tree_fits_uhwi_p (dstsize)
3151 && tree_int_cst_lt (dstsize, range[0]))
3152 || (tree_fits_uhwi_p (dstwrite)
3153 && tree_int_cst_lt (dstwrite, range[0]))))
3155 if (TREE_NO_WARNING (exp))
3156 return false;
3158 location_t loc = tree_nonartificial_location (exp);
3159 loc = expansion_point_location_if_in_system_header (loc);
3161 if (dstwrite == slen && at_least_one)
3163 /* This is a call to strcpy with a destination of 0 size
3164 and a source of unknown length. The call will write
3165 at least one byte past the end of the destination. */
3166 warning_at (loc, opt,
3167 "%K%qD writing %E or more bytes into a region "
3168 "of size %E overflows the destination",
3169 exp, func, range[0], dstsize);
3171 else if (tree_int_cst_equal (range[0], range[1]))
3172 warning_n (loc, opt, tree_to_uhwi (range[0]),
3173 "%K%qD writing %E byte into a region "
3174 "of size %E overflows the destination",
3175 "%K%qD writing %E bytes into a region "
3176 "of size %E overflows the destination",
3177 exp, func, range[0], dstsize);
3178 else if (tree_int_cst_sign_bit (range[1]))
3180 /* Avoid printing the upper bound if it's invalid. */
3181 warning_at (loc, opt,
3182 "%K%qD writing %E or more bytes into a region "
3183 "of size %E overflows the destination",
3184 exp, func, range[0], dstsize);
3186 else
3187 warning_at (loc, opt,
3188 "%K%qD writing between %E and %E bytes into "
3189 "a region of size %E overflows the destination",
3190 exp, func, range[0], range[1],
3191 dstsize);
3193 /* Return error when an overflow has been detected. */
3194 return false;
3198 /* Check the maximum length of the source sequence against the size
3199 of the destination object if known, or against the maximum size
3200 of an object. */
3201 if (maxread)
3203 get_size_range (maxread, range);
3205 /* Use the lower end for MAXREAD from now on. */
3206 if (range[0])
3207 maxread = range[0];
3209 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3211 location_t loc = tree_nonartificial_location (exp);
3212 loc = expansion_point_location_if_in_system_header (loc);
3214 if (tree_int_cst_lt (maxobjsize, range[0]))
3216 if (TREE_NO_WARNING (exp))
3217 return false;
3219 /* Warn about crazy big sizes first since that's more
3220 likely to be meaningful than saying that the bound
3221 is greater than the object size if both are big. */
3222 if (range[0] == range[1])
3223 warning_at (loc, opt,
3224 "%K%qD specified bound %E "
3225 "exceeds maximum object size %E",
3226 exp, func,
3227 range[0], maxobjsize);
3228 else
3229 warning_at (loc, opt,
3230 "%K%qD specified bound between %E and %E "
3231 "exceeds maximum object size %E",
3232 exp, func,
3233 range[0], range[1], maxobjsize);
3235 return false;
3238 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3240 if (TREE_NO_WARNING (exp))
3241 return false;
3243 if (tree_int_cst_equal (range[0], range[1]))
3244 warning_at (loc, opt,
3245 "%K%qD specified bound %E "
3246 "exceeds destination size %E",
3247 exp, func,
3248 range[0], dstsize);
3249 else
3250 warning_at (loc, opt,
3251 "%K%qD specified bound between %E and %E "
3252 "exceeds destination size %E",
3253 exp, func,
3254 range[0], range[1], dstsize);
3255 return false;
3260 /* Check for reading past the end of SRC. */
3261 if (slen
3262 && slen == srcstr
3263 && dstwrite && range[0]
3264 && tree_int_cst_lt (slen, range[0]))
3266 if (TREE_NO_WARNING (exp))
3267 return false;
3269 location_t loc = tree_nonartificial_location (exp);
3271 if (tree_int_cst_equal (range[0], range[1]))
3272 warning_n (loc, opt, tree_to_uhwi (range[0]),
3273 "%K%qD reading %E byte from a region of size %E",
3274 "%K%qD reading %E bytes from a region of size %E",
3275 exp, func, range[0], slen);
3276 else if (tree_int_cst_sign_bit (range[1]))
3278 /* Avoid printing the upper bound if it's invalid. */
3279 warning_at (loc, opt,
3280 "%K%qD reading %E or more bytes from a region "
3281 "of size %E",
3282 exp, func, range[0], slen);
3284 else
3285 warning_at (loc, opt,
3286 "%K%qD reading between %E and %E bytes from a region "
3287 "of size %E",
3288 exp, func, range[0], range[1], slen);
3289 return false;
3292 return true;
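/* Example of code the checks above diagnose (illustrative):

     char d[3];
     strcpy (d, "abcd");   // writes 5 bytes into a region of size 3

   Here the source length range is [4, 4], one is added for the
   terminating nul, and the result exceeds the destination size
   obtained from __builtin_object_size, so a -Wstringop-overflow
   warning is issued and false is returned.  */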
3295 /* Helper to compute the size of the object referenced by the DEST
3296 expression which must have pointer type, using Object Size type
3297 OSTYPE (only the least significant 2 bits are used). Return
3298 an estimate of the size of the object if successful or NULL when
3299 the size cannot be determined. When the referenced object involves
3300 a non-constant offset in some range the returned value represents
3301 the largest size given the smallest non-negative offset in the
3302 range. The function is intended for diagnostics and should not
3303 be used to influence code generation or optimization. */
3305 tree
3306 compute_objsize (tree dest, int ostype)
3308 unsigned HOST_WIDE_INT size;
3310 /* Only the two least significant bits are meaningful. */
3311 ostype &= 3;
3313 if (compute_builtin_object_size (dest, ostype, &size))
3314 return build_int_cst (sizetype, size);
3316 if (TREE_CODE (dest) == SSA_NAME)
3318 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3319 if (!is_gimple_assign (stmt))
3320 return NULL_TREE;
3322 dest = gimple_assign_rhs1 (stmt);
3324 tree_code code = gimple_assign_rhs_code (stmt);
3325 if (code == POINTER_PLUS_EXPR)
3327 /* compute_builtin_object_size fails for addresses with
3328 non-constant offsets. Try to determine the range of
3329 such an offset here and use it to adjust the constant
3330 size. */
3331 tree off = gimple_assign_rhs2 (stmt);
3332 if (TREE_CODE (off) == SSA_NAME
3333 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3335 wide_int min, max;
3336 enum value_range_type rng = get_range_info (off, &min, &max);
3338 if (rng == VR_RANGE)
3340 if (tree size = compute_objsize (dest, ostype))
3342 wide_int wisiz = wi::to_wide (size);
3344 /* Ignore negative offsets for now. For others,
3345 use the lower bound as the most optimistic
3346 estimate of the (remaining) size. */
3347 if (wi::sign_mask (min))
3349 else if (wi::ltu_p (min, wisiz))
3350 return wide_int_to_tree (TREE_TYPE (size),
3351 wi::sub (wisiz, min));
3352 else
3353 return size_zero_node;
3358 else if (code != ADDR_EXPR)
3359 return NULL_TREE;
3362 /* Unless computing the largest size (for memcpy and other raw memory
3363 functions), try to determine the size of the object from its type. */
3364 if (!ostype)
3365 return NULL_TREE;
3367 if (TREE_CODE (dest) != ADDR_EXPR)
3368 return NULL_TREE;
3370 tree type = TREE_TYPE (dest);
3371 if (TREE_CODE (type) == POINTER_TYPE)
3372 type = TREE_TYPE (type);
3374 type = TYPE_MAIN_VARIANT (type);
3376 if (TREE_CODE (type) == ARRAY_TYPE
3377 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3379 /* Return the constant size unless it's zero (that's a zero-length
3380 array likely at the end of a struct). */
3381 tree size = TYPE_SIZE_UNIT (type);
3382 if (size && TREE_CODE (size) == INTEGER_CST
3383 && !integer_zerop (size))
3384 return size;
3387 return NULL_TREE;
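/* Illustrative behavior (editor's sketch): for

     char a[8];
     char *p = a + i;   // i an SSA name with known range [2, 6]

   compute_objsize (p, 1) returns 6: the size remaining after the
   smallest non-negative offset, per the POINTER_PLUS_EXPR handling
   above.  */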
3390 /* Helper to determine and check the sizes of the source and the destination
3391 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3392 call expression, DEST is the destination argument, SRC is the source
3393 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3394 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3395 (no overflow or invalid sizes), false otherwise. */
3397 static bool
3398 check_memop_access (tree exp, tree dest, tree src, tree size)
3400 /* For functions like memset and memcpy that operate on raw memory
3401 try to determine the size of the largest source and destination
3402 object using type-0 Object Size regardless of the object size
3403 type specified by the option. */
3404 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3405 tree dstsize = compute_objsize (dest, 0);
3407 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3408 srcsize, dstsize);
3411 /* Validate memchr arguments without performing any expansion.
3412 Return NULL_RTX. */
3414 static rtx
3415 expand_builtin_memchr (tree exp, rtx)
3417 if (!validate_arglist (exp,
3418 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3419 return NULL_RTX;
3421 tree arg1 = CALL_EXPR_ARG (exp, 0);
3422 tree len = CALL_EXPR_ARG (exp, 2);
3424 /* Diagnose calls where the specified length exceeds the size
3425 of the object. */
3426 if (warn_stringop_overflow)
3428 tree size = compute_objsize (arg1, 0);
3429 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3430 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3433 return NULL_RTX;
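/* E.g. (illustrative)

     char buf[8];
     memchr (buf, 'x', 16);

   triggers the check above and is diagnosed as reading 16 bytes from
   a region of size 8, even though the call itself is never expanded
   inline here.  */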
3436 /* Expand a call EXP to the memcpy builtin.
3437 Return NULL_RTX if we failed, the caller should emit a normal call,
3438 otherwise try to get the result in TARGET, if convenient (and in
3439 mode MODE if that's convenient). */
3441 static rtx
3442 expand_builtin_memcpy (tree exp, rtx target)
3444 if (!validate_arglist (exp,
3445 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3446 return NULL_RTX;
3448 tree dest = CALL_EXPR_ARG (exp, 0);
3449 tree src = CALL_EXPR_ARG (exp, 1);
3450 tree len = CALL_EXPR_ARG (exp, 2);
3452 check_memop_access (exp, dest, src, len);
3454 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3455 /*endp=*/ 0);
3458 /* Check a call EXP to the memmove built-in for validity.
3459 Return NULL_RTX on both success and failure. */
3461 static rtx
3462 expand_builtin_memmove (tree exp, rtx)
3464 if (!validate_arglist (exp,
3465 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3466 return NULL_RTX;
3468 tree dest = CALL_EXPR_ARG (exp, 0);
3469 tree src = CALL_EXPR_ARG (exp, 1);
3470 tree len = CALL_EXPR_ARG (exp, 2);
3472 check_memop_access (exp, dest, src, len);
3474 return NULL_RTX;
3477 /* Expand a call EXP to the mempcpy builtin.
3478 Return NULL_RTX if we failed; the caller should then emit a normal
3479 call. Otherwise try to get the result in TARGET, if convenient.
3480 The expansion goes through expand_builtin_mempcpy_args with
3481 ENDP == 1, so the end pointer (DEST + LEN) is returned, a la
3482 mempcpy. */
3485 static rtx
3486 expand_builtin_mempcpy (tree exp, rtx target)
3488 if (!validate_arglist (exp,
3489 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3490 return NULL_RTX;
3492 tree dest = CALL_EXPR_ARG (exp, 0);
3493 tree src = CALL_EXPR_ARG (exp, 1);
3494 tree len = CALL_EXPR_ARG (exp, 2);
3496 /* Policy does not generally allow using compute_objsize (which
3497 is used internally by check_memop_access) to change code generation
3498 or drive optimization decisions.
3500 In this instance it is safe because the code we generate has
3501 the same semantics regardless of the return value of
3502 check_memop_access. Exactly the same amount of data is copied
3503 and the return value is exactly the same in both cases.
3505 Furthermore, check_memop_access always uses mode 0 for the call to
3506 compute_objsize, so the imprecise nature of compute_objsize is
3507 avoided. */
3509 /* Avoid expanding mempcpy into memcpy when the call is determined
3510 to overflow the buffer. This also prevents the same overflow
3511 from being diagnosed again when expanding memcpy. */
3512 if (!check_memop_access (exp, dest, src, len))
3513 return NULL_RTX;
3515 return expand_builtin_mempcpy_args (dest, src, len,
3516 target, exp, /*endp=*/ 1);
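/* Illustrative example (editorial addition, not original GCC source)
   of the ENDP return-value convention used by the copy expanders:

     char d[8];
     memcpy (d, "abc", 4);    // ENDP == 0: result is d
     mempcpy (d, "abc", 4);   // ENDP == 1: result is d + 4
     stpcpy (d, "abc");       // ENDP == 2: result is d + 3 (the nul)  */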
3519 /* Helper function to do the actual work for expanding the memory copy
3520 family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3521 LEN bytes of memory from SRC to DEST and assigns the result to TARGET
3522 if convenient. If ENDP is 0 return the
3523 destination pointer, if ENDP is 1 return the end pointer a la
3524 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3525 stpcpy. */
3527 static rtx
3528 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3529 rtx target, tree exp, int endp)
3531 const char *src_str;
3532 unsigned int src_align = get_pointer_alignment (src);
3533 unsigned int dest_align = get_pointer_alignment (dest);
3534 rtx dest_mem, src_mem, dest_addr, len_rtx;
3535 HOST_WIDE_INT expected_size = -1;
3536 unsigned int expected_align = 0;
3537 unsigned HOST_WIDE_INT min_size;
3538 unsigned HOST_WIDE_INT max_size;
3539 unsigned HOST_WIDE_INT probable_max_size;
3541 /* If DEST is not a pointer type, fall back to a library call. */
3542 if (dest_align == 0)
3543 return NULL_RTX;
3545 /* Likewise, if SRC is not a pointer type, don't do this
3546 operation in-line. */
3547 if (src_align == 0)
3548 return NULL_RTX;
3550 if (currently_expanding_gimple_stmt)
3551 stringop_block_profile (currently_expanding_gimple_stmt,
3552 &expected_align, &expected_size);
3554 if (expected_align < dest_align)
3555 expected_align = dest_align;
3556 dest_mem = get_memory_rtx (dest, len);
3557 set_mem_align (dest_mem, dest_align);
3558 len_rtx = expand_normal (len);
3559 determine_block_size (len, len_rtx, &min_size, &max_size,
3560 &probable_max_size);
3561 src_str = c_getstr (src);
3563 /* If SRC is a string constant and block move would be done
3564 by pieces, we can avoid loading the string from memory
3565 and only store the computed constants. */
3566 if (src_str
3567 && CONST_INT_P (len_rtx)
3568 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3569 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3570 CONST_CAST (char *, src_str),
3571 dest_align, false))
3573 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3574 builtin_memcpy_read_str,
3575 CONST_CAST (char *, src_str),
3576 dest_align, false, endp);
3577 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3578 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3579 return dest_mem;
3582 src_mem = get_memory_rtx (src, len);
3583 set_mem_align (src_mem, src_align);
3585 /* Copy word part most expediently. */
3586 enum block_op_methods method = BLOCK_OP_NORMAL;
3587 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3588 method = BLOCK_OP_TAILCALL;
3589 if (endp == 1 && target != const0_rtx)
3590 method = BLOCK_OP_NO_LIBCALL_RET;
3591 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3592 expected_align, expected_size,
3593 min_size, max_size, probable_max_size);
3594 if (dest_addr == pc_rtx)
3595 return NULL_RTX;
3597 if (dest_addr == 0)
3599 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3600 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3603 if (endp && target != const0_rtx)
3605 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3606 /* For stpcpy, return a pointer to the last byte (the nul). */
3607 if (endp == 2)
3608 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3611 return dest_addr;
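/* Illustrative note (editorial addition, not original GCC source):
   for LEN == n the adjustment above yields

     ENDP == 1:  dest_addr = dest + n       (mempcpy)
     ENDP == 2:  dest_addr = dest + n - 1   (stpcpy; points at the nul)  */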
3614 static rtx
3615 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3616 rtx target, tree orig_exp, int endp)
3618 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3619 endp);
3622 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3623 if we failed; the caller should then emit a normal call. Otherwise try
3624 to get the result in TARGET, if convenient. If ENDP is 0 return the
3625 destination pointer, if ENDP is 1 return the end pointer a la
3626 mempcpy, and if ENDP is 2 return the end pointer minus one a la
3627 stpcpy. */
3629 static rtx
3630 expand_movstr (tree dest, tree src, rtx target, int endp)
3632 struct expand_operand ops[3];
3633 rtx dest_mem;
3634 rtx src_mem;
3636 if (!targetm.have_movstr ())
3637 return NULL_RTX;
3639 dest_mem = get_memory_rtx (dest, NULL);
3640 src_mem = get_memory_rtx (src, NULL);
3641 if (!endp)
3643 target = force_reg (Pmode, XEXP (dest_mem, 0));
3644 dest_mem = replace_equiv_address (dest_mem, target);
3647 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3648 create_fixed_operand (&ops[1], dest_mem);
3649 create_fixed_operand (&ops[2], src_mem);
3650 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3651 return NULL_RTX;
3653 if (endp && target != const0_rtx)
3655 target = ops[0].value;
3656 /* movstr is supposed to set end to the address of the NUL
3657 terminator. If the caller requested a mempcpy-like return value,
3658 adjust it. */
3659 if (endp == 1)
3661 rtx tem = plus_constant (GET_MODE (target),
3662 gen_lowpart (GET_MODE (target), target), 1);
3663 emit_move_insn (target, force_operand (tem, NULL_RTX));
3666 return target;
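/* Illustrative note (editorial addition, not original GCC source):
   movstr leaves TARGET pointing at the copied nul terminator, i.e. at
   dest + strlen (src); the ENDP == 1 adjustment above adds one byte
   to produce the mempcpy-style result dest + strlen (src) + 1.  */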
3669 /* Do some very basic size validation of a call to the strcat builtin
3670 given by EXP. Return NULL_RTX to have the built-in expand to a call
3671 to the library function. */
3673 static rtx
3674 expand_builtin_strcat (tree exp, rtx)
3676 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3677 || !warn_stringop_overflow)
3678 return NULL_RTX;
3680 tree dest = CALL_EXPR_ARG (exp, 0);
3681 tree src = CALL_EXPR_ARG (exp, 1);
3683 /* There is no way here to determine the length of the string in
3684 the destination to which the SRC string is being appended, so
3685 just diagnose cases where the source string is longer than
3686 the destination object. */
3688 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3690 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3691 destsize);
3693 return NULL_RTX;
3696 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3697 NULL_RTX if we failed; the caller should then emit a normal call.
3698 Otherwise try to get the result in TARGET, if convenient. */
3701 static rtx
3702 expand_builtin_strcpy (tree exp, rtx target)
3704 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3705 return NULL_RTX;
3707 tree dest = CALL_EXPR_ARG (exp, 0);
3708 tree src = CALL_EXPR_ARG (exp, 1);
3710 if (warn_stringop_overflow)
3712 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3713 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3714 src, destsize);
3717 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3719 /* Check to see if the argument was declared attribute nonstring
3720 and if so, issue a warning since at this point it's not known
3721 to be nul-terminated. */
3722 tree fndecl = get_callee_fndecl (exp);
3723 maybe_warn_nonstring_arg (fndecl, exp);
3724 return ret;
3727 return NULL_RTX;
3730 /* Helper function to do the actual work for expand_builtin_strcpy. The
3731 arguments to the builtin_strcpy call DEST and SRC are broken out
3732 so that this can also be called without constructing an actual CALL_EXPR.
3733 The other arguments and return value are the same as for
3734 expand_builtin_strcpy. */
3736 static rtx
3737 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3739 return expand_movstr (dest, src, target, /*endp=*/0);
3742 /* Expand a call EXP to the stpcpy builtin.
3743 Return NULL_RTX if we failed; the caller should then emit a normal
3744 call. Otherwise try to get the result in TARGET, if convenient (and
3745 in mode MODE if that's convenient). */
3747 static rtx
3748 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3750 tree dst, src;
3751 location_t loc = EXPR_LOCATION (exp);
3753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3754 return NULL_RTX;
3756 dst = CALL_EXPR_ARG (exp, 0);
3757 src = CALL_EXPR_ARG (exp, 1);
3759 if (warn_stringop_overflow)
3761 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3762 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3763 src, destsize);
3766 /* If the return value is ignored, transform stpcpy into strcpy. */
3767 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3769 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3770 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3771 return expand_expr (result, target, mode, EXPAND_NORMAL);
3773 else
3775 tree len, lenp1;
3776 rtx ret;
3778 /* Ensure we get an actual string whose length can be evaluated at
3779 compile-time, not an expression containing a string. This is
3780 because the latter will potentially produce pessimized code
3781 when used to produce the return value. */
3782 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3783 return expand_movstr (dst, src, target, /*endp=*/2);
3785 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3786 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3787 target, exp, /*endp=*/2);
3789 if (ret)
3790 return ret;
3792 if (TREE_CODE (len) == INTEGER_CST)
3794 rtx len_rtx = expand_normal (len);
3796 if (CONST_INT_P (len_rtx))
3798 ret = expand_builtin_strcpy_args (dst, src, target);
3800 if (ret)
3802 if (! target)
3804 if (mode != VOIDmode)
3805 target = gen_reg_rtx (mode);
3806 else
3807 target = gen_reg_rtx (GET_MODE (ret));
3809 if (GET_MODE (target) != GET_MODE (ret))
3810 ret = gen_lowpart (GET_MODE (target), ret);
3812 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3813 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3814 gcc_assert (ret);
3816 return target;
3821 return expand_movstr (dst, src, target, /*endp=*/2);
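/* Illustrative example (editorial addition, not original GCC source):
   for a constant source the constant-length path above applies:

     char d[8];
     char *p = stpcpy (d, "abc");   // len == 3 at compile time

   The copy can be expanded like strcpy and P computed as d + 3
   without re-scanning the string at run time.  */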
3825 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3826 arguments while being careful to avoid duplicate warnings (which could
3827 otherwise be issued if the expander failed to expand the call, resulting
3828 in the call being emitted by expand_call ()). */
3830 static rtx
3831 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3833 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3835 /* The call has been successfully expanded. Check for nonstring
3836 arguments and issue warnings as appropriate. */
3837 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3838 return ret;
3841 return NULL_RTX;
3844 /* Check a call EXP to the stpncpy built-in for validity.
3845 Return NULL_RTX on both success and failure. */
3847 static rtx
3848 expand_builtin_stpncpy (tree exp, rtx)
3850 if (!validate_arglist (exp,
3851 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3852 || !warn_stringop_overflow)
3853 return NULL_RTX;
3855 /* The source and destination of the call. */
3856 tree dest = CALL_EXPR_ARG (exp, 0);
3857 tree src = CALL_EXPR_ARG (exp, 1);
3859 /* The exact number of bytes to write (not the maximum). */
3860 tree len = CALL_EXPR_ARG (exp, 2);
3862 /* The size of the destination object. */
3863 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3865 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3867 return NULL_RTX;
3870 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3871 bytes from constant string DATA + OFFSET and return it as a target
3872 constant. */
3874 static rtx
3875 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3876 scalar_int_mode mode)
3878 const char *str = (const char *) data;
3880 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3881 return const0_rtx;
3883 return c_readstr (str + offset, mode);
3886 /* Helper to check the sizes of sequences and the destination of calls
3887 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3888 success (no overflow or invalid sizes), false otherwise. */
3890 static bool
3891 check_strncat_sizes (tree exp, tree objsize)
3893 tree dest = CALL_EXPR_ARG (exp, 0);
3894 tree src = CALL_EXPR_ARG (exp, 1);
3895 tree maxread = CALL_EXPR_ARG (exp, 2);
3897 /* Try to determine the range of lengths that the source expression
3898 refers to. */
3899 tree lenrange[2];
3900 get_range_strlen (src, lenrange);
3902 /* Try to verify that the destination is big enough for the shortest
3903 string. */
3905 if (!objsize && warn_stringop_overflow)
3907 /* If it hasn't been provided by __strncat_chk, try to determine
3908 the size of the destination object into which the source is
3909 being copied. */
3910 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3913 /* Add one for the terminating nul. */
3914 tree srclen = (lenrange[0]
3915 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3916 size_one_node)
3917 : NULL_TREE);
3919 /* The strncat function copies at most MAXREAD bytes and always appends
3920 the terminating nul so the specified upper bound should never be equal
3921 to (or greater than) the size of the destination. */
3922 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3923 && tree_int_cst_equal (objsize, maxread))
3925 location_t loc = tree_nonartificial_location (exp);
3926 loc = expansion_point_location_if_in_system_header (loc);
3928 warning_at (loc, OPT_Wstringop_overflow_,
3929 "%K%qD specified bound %E equals destination size",
3930 exp, get_callee_fndecl (exp), maxread);
3932 return false;
3935 if (!srclen
3936 || (maxread && tree_fits_uhwi_p (maxread)
3937 && tree_fits_uhwi_p (srclen)
3938 && tree_int_cst_lt (maxread, srclen)))
3939 srclen = maxread;
3941 /* The number of bytes to write is bounded by MAXREAD, but check_access
3942 will also check SRCLEN if MAXREAD's value isn't known. */
3943 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3944 objsize);
3947 /* Similar to expand_builtin_strcat, do some very basic size validation
3948 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3949 the built-in expand to a call to the library function. */
3951 static rtx
3952 expand_builtin_strncat (tree exp, rtx)
3954 if (!validate_arglist (exp,
3955 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3956 || !warn_stringop_overflow)
3957 return NULL_RTX;
3959 tree dest = CALL_EXPR_ARG (exp, 0);
3960 tree src = CALL_EXPR_ARG (exp, 1);
3961 /* The upper bound on the number of bytes to write. */
3962 tree maxread = CALL_EXPR_ARG (exp, 2);
3963 /* The length of the source sequence. */
3964 tree slen = c_strlen (src, 1);
3966 /* Try to determine the range of lengths that the source expression
3967 refers to. */
3968 tree lenrange[2];
3969 if (slen)
3970 lenrange[0] = lenrange[1] = slen;
3971 else
3972 get_range_strlen (src, lenrange);
3974 /* Try to verify that the destination is big enough for the shortest
3975 string. First try to determine the size of the destination object
3976 into which the source is being copied. */
3977 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3979 /* Add one for the terminating nul. */
3980 tree srclen = (lenrange[0]
3981 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3982 size_one_node)
3983 : NULL_TREE);
3985 /* The strncat function copies at most MAXREAD bytes and always appends
3986 the terminating nul so the specified upper bound should never be equal
3987 to (or greater than) the size of the destination. */
3988 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
3989 && tree_int_cst_equal (destsize, maxread))
3991 location_t loc = tree_nonartificial_location (exp);
3992 loc = expansion_point_location_if_in_system_header (loc);
3994 warning_at (loc, OPT_Wstringop_overflow_,
3995 "%K%qD specified bound %E equals destination size",
3996 exp, get_callee_fndecl (exp), maxread);
3998 return NULL_RTX;
4001 if (!srclen
4002 || (maxread && tree_fits_uhwi_p (maxread)
4003 && tree_fits_uhwi_p (srclen)
4004 && tree_int_cst_lt (maxread, srclen)))
4005 srclen = maxread;
4007 /* The number of bytes to write is SRCLEN. */
4008 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4010 return NULL_RTX;
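/* Illustrative example (editorial addition, not original GCC source):
   the bound check above diagnoses the common off-by-one misuse

     char d[8] = "";
     strncat (d, s, sizeof d);   // bound 8 equals the size of 'd'

   because strncat appends a nul after up to MAXREAD copied bytes;
   a safe bound here is sizeof d - strlen (d) - 1.  */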
4013 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4014 NULL_RTX if we failed; the caller should then emit a normal call. */
4016 static rtx
4017 expand_builtin_strncpy (tree exp, rtx target)
4019 location_t loc = EXPR_LOCATION (exp);
4021 if (validate_arglist (exp,
4022 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4024 tree dest = CALL_EXPR_ARG (exp, 0);
4025 tree src = CALL_EXPR_ARG (exp, 1);
4026 /* The number of bytes to write (not the maximum). */
4027 tree len = CALL_EXPR_ARG (exp, 2);
4028 /* The length of the source sequence. */
4029 tree slen = c_strlen (src, 1);
4031 if (warn_stringop_overflow)
4033 tree destsize = compute_objsize (dest,
4034 warn_stringop_overflow - 1);
4036 /* The number of bytes to write is LEN but check_access will also
4037 check SLEN if LEN's value isn't known. */
4038 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4039 destsize);
4042 /* We must be passed constant LEN and SRC parameters. */
4043 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4044 return NULL_RTX;
4046 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4048 /* We're required to pad with trailing zeros if the requested
4049 LEN is greater than strlen (SRC) + 1. In that case try to
4050 use store_by_pieces; if that fails, punt. */
4051 if (tree_int_cst_lt (slen, len))
4053 unsigned int dest_align = get_pointer_alignment (dest);
4054 const char *p = c_getstr (src);
4055 rtx dest_mem;
4057 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4058 || !can_store_by_pieces (tree_to_uhwi (len),
4059 builtin_strncpy_read_str,
4060 CONST_CAST (char *, p),
4061 dest_align, false))
4062 return NULL_RTX;
4064 dest_mem = get_memory_rtx (dest, len);
4065 store_by_pieces (dest_mem, tree_to_uhwi (len),
4066 builtin_strncpy_read_str,
4067 CONST_CAST (char *, p), dest_align, false, 0);
4068 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4069 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4070 return dest_mem;
4073 return NULL_RTX;
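/* Illustrative example (editorial addition, not original GCC source)
   of the padding case handled above, where LEN > strlen (SRC) + 1:

     char d[8];
     strncpy (d, "ab", 8);   // stores 'a', 'b', then six nul bytes

   Both the string bytes and the trailing zeros are known at compile
   time, so store_by_pieces can emit them as constant stores.  */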
4076 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4077 bytes from constant string DATA + OFFSET and return it as a target
4078 constant. */
4080 static rtx
4081 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4082 scalar_int_mode mode)
4084 const char *c = (const char *) data;
4085 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4087 memset (p, *c, GET_MODE_SIZE (mode));
4089 return c_readstr (p, mode);
4092 /* Callback routine for store_by_pieces. Return the RTL of a register
4093 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4094 char value given in the RTL register data. For example, if mode is
4095 4 bytes wide, return the RTL for 0x01010101*data. */
4097 static rtx
4098 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4099 scalar_int_mode mode)
4101 rtx target, coeff;
4102 size_t size;
4103 char *p;
4105 size = GET_MODE_SIZE (mode);
4106 if (size == 1)
4107 return (rtx) data;
4109 p = XALLOCAVEC (char, size);
4110 memset (p, 1, size);
4111 coeff = c_readstr (p, mode);
4113 target = convert_to_mode (mode, (rtx) data, 1);
4114 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4115 return force_reg (mode, target);
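/* Illustrative note (editorial addition, not original GCC source):
   for a 4-byte mode the multiplication above computes

     data * 0x01010101 == (data << 24) | (data << 16) | (data << 8) | data

   e.g. a runtime byte 0xAB becomes 0xABABABAB, replicating the value
   across the word without a loop.  */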
4118 /* Expand expression EXP, which is a call to the memset builtin. Return
4119 NULL_RTX if we failed; the caller should then emit a normal call.
4120 Otherwise try to get the result in TARGET, if convenient (and in
4121 mode MODE if that's convenient). */
4123 static rtx
4124 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4126 if (!validate_arglist (exp,
4127 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4128 return NULL_RTX;
4130 tree dest = CALL_EXPR_ARG (exp, 0);
4131 tree val = CALL_EXPR_ARG (exp, 1);
4132 tree len = CALL_EXPR_ARG (exp, 2);
4134 check_memop_access (exp, dest, NULL_TREE, len);
4136 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4139 /* Helper function to do the actual work for expand_builtin_memset. The
4140 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4141 so that this can also be called without constructing an actual CALL_EXPR.
4142 The other arguments and return value are the same as for
4143 expand_builtin_memset. */
4145 static rtx
4146 expand_builtin_memset_args (tree dest, tree val, tree len,
4147 rtx target, machine_mode mode, tree orig_exp)
4149 tree fndecl, fn;
4150 enum built_in_function fcode;
4151 machine_mode val_mode;
4152 char c;
4153 unsigned int dest_align;
4154 rtx dest_mem, dest_addr, len_rtx;
4155 HOST_WIDE_INT expected_size = -1;
4156 unsigned int expected_align = 0;
4157 unsigned HOST_WIDE_INT min_size;
4158 unsigned HOST_WIDE_INT max_size;
4159 unsigned HOST_WIDE_INT probable_max_size;
4161 dest_align = get_pointer_alignment (dest);
4163 /* If DEST is not a pointer type, don't do this operation in-line. */
4164 if (dest_align == 0)
4165 return NULL_RTX;
4167 if (currently_expanding_gimple_stmt)
4168 stringop_block_profile (currently_expanding_gimple_stmt,
4169 &expected_align, &expected_size);
4171 if (expected_align < dest_align)
4172 expected_align = dest_align;
4174 /* If the LEN parameter is zero, return DEST. */
4175 if (integer_zerop (len))
4177 /* Evaluate and ignore VAL in case it has side-effects. */
4178 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4179 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4182 /* Stabilize the arguments in case we fail. */
4183 dest = builtin_save_expr (dest);
4184 val = builtin_save_expr (val);
4185 len = builtin_save_expr (len);
4187 len_rtx = expand_normal (len);
4188 determine_block_size (len, len_rtx, &min_size, &max_size,
4189 &probable_max_size);
4190 dest_mem = get_memory_rtx (dest, len);
4191 val_mode = TYPE_MODE (unsigned_char_type_node);
4193 if (TREE_CODE (val) != INTEGER_CST)
4195 rtx val_rtx;
4197 val_rtx = expand_normal (val);
4198 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4200 /* Assume that we can memset by pieces if we can store
4201 the coefficients by pieces (in the required modes).
4202 We can't pass builtin_memset_gen_str as that emits RTL. */
4203 c = 1;
4204 if (tree_fits_uhwi_p (len)
4205 && can_store_by_pieces (tree_to_uhwi (len),
4206 builtin_memset_read_str, &c, dest_align,
4207 true))
4209 val_rtx = force_reg (val_mode, val_rtx);
4210 store_by_pieces (dest_mem, tree_to_uhwi (len),
4211 builtin_memset_gen_str, val_rtx, dest_align,
4212 true, 0);
4214 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4215 dest_align, expected_align,
4216 expected_size, min_size, max_size,
4217 probable_max_size))
4218 goto do_libcall;
4220 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4221 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4222 return dest_mem;
4225 if (target_char_cast (val, &c))
4226 goto do_libcall;
4228 if (c)
4230 if (tree_fits_uhwi_p (len)
4231 && can_store_by_pieces (tree_to_uhwi (len),
4232 builtin_memset_read_str, &c, dest_align,
4233 true))
4234 store_by_pieces (dest_mem, tree_to_uhwi (len),
4235 builtin_memset_read_str, &c, dest_align, true, 0);
4236 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4237 gen_int_mode (c, val_mode),
4238 dest_align, expected_align,
4239 expected_size, min_size, max_size,
4240 probable_max_size))
4241 goto do_libcall;
4243 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4244 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4245 return dest_mem;
4248 set_mem_align (dest_mem, dest_align);
4249 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4250 CALL_EXPR_TAILCALL (orig_exp)
4251 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4252 expected_align, expected_size,
4253 min_size, max_size,
4254 probable_max_size);
4256 if (dest_addr == 0)
4258 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4259 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4262 return dest_addr;
4264 do_libcall:
4265 fndecl = get_callee_fndecl (orig_exp);
4266 fcode = DECL_FUNCTION_CODE (fndecl);
4267 if (fcode == BUILT_IN_MEMSET)
4268 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4269 dest, val, len);
4270 else if (fcode == BUILT_IN_BZERO)
4271 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4272 dest, len);
4273 else
4274 gcc_unreachable ();
4275 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4276 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4277 return expand_call (fn, target, target == const0_rtx);
4280 /* Expand expression EXP, which is a call to the bzero builtin. Return
4281 NULL_RTX if we failed; the caller should then emit a normal call. */
4283 static rtx
4284 expand_builtin_bzero (tree exp)
4286 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4287 return NULL_RTX;
4289 tree dest = CALL_EXPR_ARG (exp, 0);
4290 tree size = CALL_EXPR_ARG (exp, 1);
4292 check_memop_access (exp, dest, NULL_TREE, size);
4294 /* New argument list transforming bzero (ptr x, int y) to
4295 memset (ptr x, int 0, size_t y). This is done this way
4296 so that if it isn't expanded inline, we fall back to
4297 calling bzero instead of memset. */
4299 location_t loc = EXPR_LOCATION (exp);
4301 return expand_builtin_memset_args (dest, integer_zero_node,
4302 fold_convert_loc (loc,
4303 size_type_node, size),
4304 const0_rtx, VOIDmode, exp);
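/* Illustrative note (editorial addition, not original GCC source):
   the rewrite above treats

     bzero (p, n);   as   memset (p, 0, (size_t) n);

   while keeping the original bzero FNDECL, so a failed inline
   expansion falls back to a call to bzero rather than memset.  */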
4307 /* Try to expand cmpstr operation ICODE with the given operands.
4308 Return the result rtx on success, otherwise return null. */
4310 static rtx
4311 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4312 HOST_WIDE_INT align)
4314 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4316 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4317 target = NULL_RTX;
4319 struct expand_operand ops[4];
4320 create_output_operand (&ops[0], target, insn_mode);
4321 create_fixed_operand (&ops[1], arg1_rtx);
4322 create_fixed_operand (&ops[2], arg2_rtx);
4323 create_integer_operand (&ops[3], align);
4324 if (maybe_expand_insn (icode, 4, ops))
4325 return ops[0].value;
4326 return NULL_RTX;
4329 /* Expand expression EXP, which is a call to the memcmp built-in function.
4330 Return NULL_RTX if we failed and the caller should emit a normal call,
4331 otherwise try to get the result in TARGET, if convenient.
4332 RESULT_EQ is true if we can relax the returned value to be either zero
4333 or nonzero, without caring about the sign. */
4335 static rtx
4336 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4338 if (!validate_arglist (exp,
4339 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4340 return NULL_RTX;
4342 tree arg1 = CALL_EXPR_ARG (exp, 0);
4343 tree arg2 = CALL_EXPR_ARG (exp, 1);
4344 tree len = CALL_EXPR_ARG (exp, 2);
4346 /* Diagnose calls where the specified length exceeds the size of either
4347 object. */
4348 if (warn_stringop_overflow)
4350 tree size = compute_objsize (arg1, 0);
4351 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4352 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4354 size = compute_objsize (arg2, 0);
4355 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4356 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4360 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4361 location_t loc = EXPR_LOCATION (exp);
4363 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4364 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4366 /* If either argument isn't a pointer, fall back to a library call. */
4367 if (arg1_align == 0 || arg2_align == 0)
4368 return NULL_RTX;
4370 rtx arg1_rtx = get_memory_rtx (arg1, len);
4371 rtx arg2_rtx = get_memory_rtx (arg2, len);
4372 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4374 /* Set MEM_SIZE as appropriate. */
4375 if (CONST_INT_P (len_rtx))
4377 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4378 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4381 by_pieces_constfn constfn = NULL;
4383 const char *src_str = c_getstr (arg2);
4384 if (result_eq && src_str == NULL)
4386 src_str = c_getstr (arg1);
4387 if (src_str != NULL)
4388 std::swap (arg1_rtx, arg2_rtx);
4391 /* If SRC is a string constant and the block comparison would be
4392 done by pieces, we can avoid loading the string from memory
4393 and only store the computed constants. */
4394 if (src_str
4395 && CONST_INT_P (len_rtx)
4396 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4397 constfn = builtin_memcpy_read_str;
4399 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4400 TREE_TYPE (len), target,
4401 result_eq, constfn,
4402 CONST_CAST (char *, src_str));
4404 if (result)
4406 /* Return the value in the proper mode for this function. */
4407 if (GET_MODE (result) == mode)
4408 return result;
4410 if (target != 0)
4412 convert_move (target, result, 0);
4413 return target;
4416 return convert_to_mode (mode, result, 0);
4419 return NULL_RTX;
4422 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4423 NULL_RTX if we failed; the caller should then emit a normal call.
4424 Otherwise try to get the result in TARGET, if convenient. */
4426 static rtx
4427 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4429 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4430 return NULL_RTX;
4432 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4433 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4434 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4435 return NULL_RTX;
4437 tree arg1 = CALL_EXPR_ARG (exp, 0);
4438 tree arg2 = CALL_EXPR_ARG (exp, 1);
4440 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4441 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4443 /* If either argument isn't a pointer, fall back to a library call. */
4444 if (arg1_align == 0 || arg2_align == 0)
4445 return NULL_RTX;
4447 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
4448 arg1 = builtin_save_expr (arg1);
4449 arg2 = builtin_save_expr (arg2);
4451 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4452 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4454 rtx result = NULL_RTX;
4455 /* Try to call cmpstrsi. */
4456 if (cmpstr_icode != CODE_FOR_nothing)
4457 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4458 MIN (arg1_align, arg2_align));
4460 /* Try to determine at least one length and call cmpstrnsi. */
4461 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4463 tree len;
4464 rtx arg3_rtx;
4466 tree len1 = c_strlen (arg1, 1);
4467 tree len2 = c_strlen (arg2, 1);
4469 if (len1)
4470 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4471 if (len2)
4472 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4474 /* If we don't have a constant length for the first, use the length
4475 of the second, if we know it. We don't require a constant for
4476 this case; some cost analysis could be done if both are available
4477 but neither is constant. For now, assume they're equally cheap,
4478 unless one has side effects. If both strings have constant lengths,
4479 use the smaller. */
4481 if (!len1)
4482 len = len2;
4483 else if (!len2)
4484 len = len1;
4485 else if (TREE_SIDE_EFFECTS (len1))
4486 len = len2;
4487 else if (TREE_SIDE_EFFECTS (len2))
4488 len = len1;
4489 else if (TREE_CODE (len1) != INTEGER_CST)
4490 len = len2;
4491 else if (TREE_CODE (len2) != INTEGER_CST)
4492 len = len1;
4493 else if (tree_int_cst_lt (len1, len2))
4494 len = len1;
4495 else
4496 len = len2;
4498 /* If both arguments have side effects, we cannot optimize. */
4499 if (len && !TREE_SIDE_EFFECTS (len))
4501 arg3_rtx = expand_normal (len);
4502 result = expand_cmpstrn_or_cmpmem
4503 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4504 arg3_rtx, MIN (arg1_align, arg2_align));
4508 tree fndecl = get_callee_fndecl (exp);
4509 if (result)
4511 /* Check to see if the argument was declared attribute nonstring
4512 and if so, issue a warning since at this point it's not known
4513 to be nul-terminated. */
4514 maybe_warn_nonstring_arg (fndecl, exp);
4516 /* Return the value in the proper mode for this function. */
4517 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4518 if (GET_MODE (result) == mode)
4519 return result;
4520 if (target == 0)
4521 return convert_to_mode (mode, result, 0);
4522 convert_move (target, result, 0);
4523 return target;
4526 /* Expand the library call ourselves using a stabilized argument
4527 list to avoid evaluating the function's arguments twice. */
4528 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4529 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4530 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4531 return expand_call (fn, target, target == const0_rtx);
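/* Illustrative example (editorial addition, not original GCC source)
   of the length selection above:

     strcmp (buf, "abc")

   gives len2 == 4 (strlen + 1) while len1 is unknown, so cmpstrnsi
   compares at most 4 bytes; the result is decided no later than the
   terminating nul of the constant string.  */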
4534 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4535 NULL_RTX if we failed; the caller should then emit a normal call.
4536 Otherwise try to get the result in TARGET, if convenient. */
4538 static rtx
4539 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4540 ATTRIBUTE_UNUSED machine_mode mode)
4542 if (!validate_arglist (exp,
4543 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4544 return NULL_RTX;
4546 /* If c_strlen can determine an expression for one of the string
4547 lengths, and it doesn't have side effects, then emit cmpstrnsi
4548 using length MIN(strlen(string)+1, arg3). */
4549 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4550 if (cmpstrn_icode == CODE_FOR_nothing)
4551 return NULL_RTX;
4553 tree len;
4555 tree arg1 = CALL_EXPR_ARG (exp, 0);
4556 tree arg2 = CALL_EXPR_ARG (exp, 1);
4557 tree arg3 = CALL_EXPR_ARG (exp, 2);
4559 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4560 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4562 tree len1 = c_strlen (arg1, 1);
4563 tree len2 = c_strlen (arg2, 1);
4565 location_t loc = EXPR_LOCATION (exp);
4567 if (len1)
4568 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4569 if (len2)
4570 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4572 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4574 /* If we don't have a constant length for the first, use the length
4575 of the second, if we know it. If neither string is constant length,
4576 use the given length argument. We don't require a constant for
4577 this case; some cost analysis could be done if both are available
4578 but neither is constant. For now, assume they're equally cheap,
4579 unless one has side effects. If both strings have constant lengths,
4580 use the smaller. */
4582 if (!len1 && !len2)
4583 len = len3;
4584 else if (!len1)
4585 len = len2;
4586 else if (!len2)
4587 len = len1;
4588 else if (TREE_SIDE_EFFECTS (len1))
4589 len = len2;
4590 else if (TREE_SIDE_EFFECTS (len2))
4591 len = len1;
4592 else if (TREE_CODE (len1) != INTEGER_CST)
4593 len = len2;
4594 else if (TREE_CODE (len2) != INTEGER_CST)
4595 len = len1;
4596 else if (tree_int_cst_lt (len1, len2))
4597 len = len1;
4598 else
4599 len = len2;
4601 /* If we are not using the given length, we must incorporate it here.
4602 The actual new length parameter will be MIN(len,arg3) in this case. */
4603 if (len != len3)
4604 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4605 rtx arg1_rtx = get_memory_rtx (arg1, len);
4606 rtx arg2_rtx = get_memory_rtx (arg2, len);
4607 rtx arg3_rtx = expand_normal (len);
4608 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4609 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4610 MIN (arg1_align, arg2_align));
4612 tree fndecl = get_callee_fndecl (exp);
4613 if (result)
4615 /* Check to see if the argument was declared attribute nonstring
4616 and if so, issue a warning since at this point it's not known
4617 to be nul-terminated. */
4618 maybe_warn_nonstring_arg (fndecl, exp);
4620 /* Return the value in the proper mode for this function. */
4621 mode = TYPE_MODE (TREE_TYPE (exp));
4622 if (GET_MODE (result) == mode)
4623 return result;
4624 if (target == 0)
4625 return convert_to_mode (mode, result, 0);
4626 convert_move (target, result, 0);
4627 return target;
4630 /* Expand the library call ourselves using a stabilized argument
4631 list to avoid evaluating the function's arguments twice. */
4632 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4633 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4634 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4635 return expand_call (fn, target, target == const0_rtx);
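/* Illustrative example (editorial addition, not original GCC source):
   for

     strncmp (s, "abc", n)

   the constant length is 4 (strlen + 1), so the comparison length
   becomes MIN (4, n); bytes past the nul of "abc" can never affect
   the result.  */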
4638 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4639 if that's convenient. */
4641 rtx
4642 expand_builtin_saveregs (void)
4644 rtx val;
4645 rtx_insn *seq;
4647 /* Don't do __builtin_saveregs more than once in a function.
4648 Save the result of the first call and reuse it. */
4649 if (saveregs_value != 0)
4650 return saveregs_value;
4652 /* When this function is called, it means that registers must be
4653 saved on entry to this function. So we migrate the call to the
4654 first insn of this function. */
4656 start_sequence ();
4658 /* Do whatever the machine needs done in this case. */
4659 val = targetm.calls.expand_builtin_saveregs ();
4661 seq = get_insns ();
4662 end_sequence ();
4664 saveregs_value = val;
4666 /* Put the insns after the NOTE that starts the function. If this
4667 is inside a start_sequence, make the outer-level insn chain current, so
4668 the code is placed at the start of the function. */
4669 push_topmost_sequence ();
4670 emit_insn_after (seq, entry_of_function ());
4671 pop_topmost_sequence ();
4673 return val;
4676 /* Expand a call to __builtin_next_arg. */
4678 static rtx
4679 expand_builtin_next_arg (void)
4681 /* Argument checking is already done in fold_builtin_next_arg,
4682 which must be called before this function. */
4683 return expand_binop (ptr_mode, add_optab,
4684 crtl->args.internal_arg_pointer,
4685 crtl->args.arg_offset_rtx,
4686 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4689 /* Make it easier for the backends by protecting the valist argument
4690 from multiple evaluations. */
4692 static tree
4693 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4695 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4697 /* The current way of determining the type of valist is completely
4698 bogus. We should have the information on the va builtin instead. */
4699 if (!vatype)
4700 vatype = targetm.fn_abi_va_list (cfun->decl);
4702 if (TREE_CODE (vatype) == ARRAY_TYPE)
4704 if (TREE_SIDE_EFFECTS (valist))
4705 valist = save_expr (valist);
4707 /* For this case, the backends will be expecting a pointer to
4708 vatype, but it's possible we've actually been given an array
4709 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4710 So fix it. */
4711 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4713 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4714 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4717 else
4719 tree pt = build_pointer_type (vatype);
4721 if (! needs_lvalue)
4723 if (! TREE_SIDE_EFFECTS (valist))
4724 return valist;
4726 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4727 TREE_SIDE_EFFECTS (valist) = 1;
4730 if (TREE_SIDE_EFFECTS (valist))
4731 valist = save_expr (valist);
4732 valist = fold_build2_loc (loc, MEM_REF,
4733 vatype, valist, build_int_cst (pt, 0));
4736 return valist;
4739 /* The "standard" definition of va_list is void*. */
4741 tree
4742 std_build_builtin_va_list (void)
4744 return ptr_type_node;
4747 /* The "standard" abi va_list is va_list_type_node. */
4749 tree
4750 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4752 return va_list_type_node;
4755 /* The "standard" type of va_list is va_list_type_node. */
4757 tree
4758 std_canonical_va_list_type (tree type)
4760 tree wtype, htype;
4762 wtype = va_list_type_node;
4763 htype = type;
4765 if (TREE_CODE (wtype) == ARRAY_TYPE)
4767 /* If va_list is an array type, the argument may have decayed
4768 to a pointer type, e.g. by being passed to another function.
4769 In that case, unwrap both types so that we can compare the
4770 underlying records. */
4771 if (TREE_CODE (htype) == ARRAY_TYPE
4772 || POINTER_TYPE_P (htype))
4774 wtype = TREE_TYPE (wtype);
4775 htype = TREE_TYPE (htype);
4778 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4779 return va_list_type_node;
4781 return NULL_TREE;
4784 /* The "standard" implementation of va_start: just assign `nextarg' to
4785 the variable. */
4787 void
4788 std_expand_builtin_va_start (tree valist, rtx nextarg)
4790 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4791 convert_move (va_r, nextarg, 0);
4794 /* Expand EXP, a call to __builtin_va_start. */
4796 static rtx
4797 expand_builtin_va_start (tree exp)
4799 rtx nextarg;
4800 tree valist;
4801 location_t loc = EXPR_LOCATION (exp);
4803 if (call_expr_nargs (exp) < 2)
4805 error_at (loc, "too few arguments to function %<va_start%>");
4806 return const0_rtx;
4809 if (fold_builtin_next_arg (exp, true))
4810 return const0_rtx;
4812 nextarg = expand_builtin_next_arg ();
4813 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4815 if (targetm.expand_builtin_va_start)
4816 targetm.expand_builtin_va_start (valist, nextarg);
4817 else
4818 std_expand_builtin_va_start (valist, nextarg);
4820 return const0_rtx;
4823 /* Expand EXP, a call to __builtin_va_end. */
4825 static rtx
4826 expand_builtin_va_end (tree exp)
4828 tree valist = CALL_EXPR_ARG (exp, 0);
4830 /* Evaluate for side effects, if needed. I hate macros that don't
4831 do that. */
4832 if (TREE_SIDE_EFFECTS (valist))
4833 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4835 return const0_rtx;
4838 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4839 builtin rather than just as an assignment in stdarg.h because of the
4840 nastiness of array-type va_list types. */
4842 static rtx
4843 expand_builtin_va_copy (tree exp)
4845 tree dst, src, t;
4846 location_t loc = EXPR_LOCATION (exp);
4848 dst = CALL_EXPR_ARG (exp, 0);
4849 src = CALL_EXPR_ARG (exp, 1);
4851 dst = stabilize_va_list_loc (loc, dst, 1);
4852 src = stabilize_va_list_loc (loc, src, 0);
4854 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4856 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4858 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4859 TREE_SIDE_EFFECTS (t) = 1;
4860 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4862 else
4864 rtx dstb, srcb, size;
4866 /* Evaluate to pointers. */
4867 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4868 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4869 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4870 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4872 dstb = convert_memory_address (Pmode, dstb);
4873 srcb = convert_memory_address (Pmode, srcb);
4875 /* "Dereference" to BLKmode memories. */
4876 dstb = gen_rtx_MEM (BLKmode, dstb);
4877 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4878 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4879 srcb = gen_rtx_MEM (BLKmode, srcb);
4880 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4881 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4883 /* Copy. */
4884 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4887 return const0_rtx;
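/* Illustrative note (editorial addition, not original GCC source):
   for a pointer-style va_list the MODIFY_EXPR branch above makes

     va_copy (dst, src);

   a plain assignment; for an array-style va_list (e.g. on x86-64)
   the block move copies sizeof (va_list) bytes instead.  */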
4890 /* Expand a call to one of the builtin functions __builtin_frame_address or
4891 __builtin_return_address. */
4893 static rtx
4894 expand_builtin_frame_address (tree fndecl, tree exp)
4896 /* The argument must be a nonnegative integer constant.
4897 It counts the number of frames to scan up the stack.
4898 The value is either the frame pointer value or the return
4899 address saved in that frame. */
4900 if (call_expr_nargs (exp) == 0)
4901 /* Warning about missing arg was already issued. */
4902 return const0_rtx;
4903 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4905 error ("invalid argument to %qD", fndecl);
4906 return const0_rtx;
4908 else
4910 /* Number of frames to scan up the stack. */
4911 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4913 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4915 /* Some ports cannot access arbitrary stack frames. */
4916 if (tem == NULL)
4918 warning (0, "unsupported argument to %qD", fndecl);
4919 return const0_rtx;
4922 if (count)
4924 /* Warn since no effort is made to ensure that any frame
4925 beyond the current one exists or can be safely reached. */
4926 warning (OPT_Wframe_address, "calling %qD with "
4927 "a nonzero argument is unsafe", fndecl);
4930 /* For __builtin_frame_address, return what we've got. */
4931 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4932 return tem;
4934 if (!REG_P (tem)
4935 && ! CONSTANT_P (tem))
4936 tem = copy_addr_to_reg (tem);
4937 return tem;
4941 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4942 failed and the caller should emit a normal call. */
4944 static rtx
4945 expand_builtin_alloca (tree exp)
4947 rtx op0;
4948 rtx result;
4949 unsigned int align;
4950 tree fndecl = get_callee_fndecl (exp);
4951 HOST_WIDE_INT max_size;
4952 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4953 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4954 bool valid_arglist
4955 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4956 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4957 VOID_TYPE)
4958 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4959 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4960 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4962 if (!valid_arglist)
4963 return NULL_RTX;
4965 if ((alloca_for_var && !warn_vla_limit)
4966 || (!alloca_for_var && !warn_alloca_limit))
4968 /* -Walloca-larger-than and -Wvla-larger-than settings override
4969 the more general -Walloc-size-larger-than so unless either of
4970 the former options is specified check the alloca arguments for
4971 overflow. */
4972 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4973 int idx[] = { 0, -1 };
4974 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4977 /* Compute the argument. */
4978 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4980 /* Compute the alignment. */
4981 align = (fcode == BUILT_IN_ALLOCA
4982 ? BIGGEST_ALIGNMENT
4983 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4985 /* Compute the maximum size. */
4986 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4987 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4988 : -1);
4990 /* Allocate the desired space. If the allocation stems from the declaration
4991 of a variable-sized object, it cannot accumulate. */
4992 result
4993 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4994 result = convert_memory_address (ptr_mode, result);
4996 return result;
4999 /* Emit a call to the __asan_allocas_unpoison library function for EXP.
5000 Add virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5001 STACK_DYNAMIC_OFFSET value, to the call's second argument. See the
5002 motivation in the comment for handle_builtin_stack_restore. */
5004 static rtx
5005 expand_asan_emit_allocas_unpoison (tree exp)
5007 tree arg0 = CALL_EXPR_ARG (exp, 0);
5008 tree arg1 = CALL_EXPR_ARG (exp, 1);
5009 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5010 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5011 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5012 stack_pointer_rtx, NULL_RTX, 0,
5013 OPTAB_LIB_WIDEN);
5014 off = convert_modes (ptr_mode, Pmode, off, 0);
5015 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5016 OPTAB_LIB_WIDEN);
5017 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5018 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5019 top, ptr_mode, bot, ptr_mode);
5020 return ret;
5023 /* Expand a call to the bswap builtin in EXP.
5024 Return NULL_RTX if a normal call should be emitted rather than expanding the
5025 function in-line. If convenient, the result should be placed in TARGET.
5026 SUBTARGET may be used as the target for computing one of EXP's operands. */
5028 static rtx
5029 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5030 rtx subtarget)
5032 tree arg;
5033 rtx op0;
5035 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5036 return NULL_RTX;
5038 arg = CALL_EXPR_ARG (exp, 0);
5039 op0 = expand_expr (arg,
5040 subtarget && GET_MODE (subtarget) == target_mode
5041 ? subtarget : NULL_RTX,
5042 target_mode, EXPAND_NORMAL);
5043 if (GET_MODE (op0) != target_mode)
5044 op0 = convert_to_mode (target_mode, op0, 1);
5046 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5048 gcc_assert (target);
5050 return convert_to_mode (target_mode, target, 1);
5053 /* Expand a call to a unary builtin in EXP.
5054 Return NULL_RTX if a normal call should be emitted rather than expanding the
5055 function in-line. If convenient, the result should be placed in TARGET.
5056 SUBTARGET may be used as the target for computing one of EXP's operands. */
5058 static rtx
5059 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5060 rtx subtarget, optab op_optab)
5062 rtx op0;
5064 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5065 return NULL_RTX;
5067 /* Compute the argument. */
5068 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5069 (subtarget
5070 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5071 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5072 VOIDmode, EXPAND_NORMAL);
5073 /* Compute op, into TARGET if possible.
5074 Set TARGET to wherever the result comes back. */
5075 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5076 op_optab, op0, target, op_optab != clrsb_optab);
5077 gcc_assert (target);
5079 return convert_to_mode (target_mode, target, 0);
5082 /* Expand a call to __builtin_expect. We just return our argument
5083 as the builtin_expect semantics should already have been handled
5084 by the tree branch prediction pass. */
5086 static rtx
5087 expand_builtin_expect (tree exp, rtx target)
5089 tree arg;
5091 if (call_expr_nargs (exp) < 2)
5092 return const0_rtx;
5093 arg = CALL_EXPR_ARG (exp, 0);
5095 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5096 /* When guessing was done, the hints should be already stripped away. */
5097 gcc_assert (!flag_guess_branch_prob
5098 || optimize == 0 || seen_error ());
5099 return target;
5102 /* Expand a call to __builtin_assume_aligned. We just return our first
5103 argument as the builtin_assume_aligned semantics should already have
5104 been handled by CCP. */
5106 static rtx
5107 expand_builtin_assume_aligned (tree exp, rtx target)
5109 if (call_expr_nargs (exp) < 2)
5110 return const0_rtx;
5111 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5112 EXPAND_NORMAL);
5113 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5114 && (call_expr_nargs (exp) < 3
5115 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5116 return target;
5119 void
5120 expand_builtin_trap (void)
5122 if (targetm.have_trap ())
5124 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5125 /* For trap insns, when not accumulating outgoing args, force
5126 a REG_ARGS_SIZE note to prevent crossjumping of calls with
5127 different arg sizes. */
5128 if (!ACCUMULATE_OUTGOING_ARGS)
5129 add_args_size_note (insn, stack_pointer_delta);
5131 else
5133 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5134 tree call_expr = build_call_expr (fn, 0);
5135 expand_call (call_expr, NULL_RTX, false);
5138 emit_barrier ();
5141 /* Expand a call to __builtin_unreachable. We do nothing except emit
5142 a barrier saying that control flow will not pass here.
5144 It is the responsibility of the program being compiled to ensure
5145 that control flow never reaches __builtin_unreachable. */
5146 static void
5147 expand_builtin_unreachable (void)
5149 emit_barrier ();
5152 /* Expand EXP, a call to fabs, fabsf or fabsl.
5153 Return NULL_RTX if a normal call should be emitted rather than expanding
5154 the function inline. If convenient, the result should be placed
5155 in TARGET. SUBTARGET may be used as the target for computing
5156 the operand. */
5158 static rtx
5159 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5161 machine_mode mode;
5162 tree arg;
5163 rtx op0;
5165 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5166 return NULL_RTX;
5168 arg = CALL_EXPR_ARG (exp, 0);
5169 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5170 mode = TYPE_MODE (TREE_TYPE (arg));
5171 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5172 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5175 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5176 Return NULL_RTX if a normal call should be emitted rather than expanding
5177 the function inline. If convenient, the result should be placed in TARGET.
5178 SUBTARGET may be used as the target for computing the operand. */
5180 static rtx
5181 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5183 rtx op0, op1;
5184 tree arg;
5186 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5187 return NULL_RTX;
5189 arg = CALL_EXPR_ARG (exp, 0);
5190 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5192 arg = CALL_EXPR_ARG (exp, 1);
5193 op1 = expand_normal (arg);
5195 return expand_copysign (op0, op1, target);
5198 /* Expand a call to __builtin___clear_cache. */
5200 static rtx
5201 expand_builtin___clear_cache (tree exp)
5203 if (!targetm.code_for_clear_cache)
5205 #ifdef CLEAR_INSN_CACHE
5206 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5207 does something. Just do the default expansion to a call to
5208 __clear_cache(). */
5209 return NULL_RTX;
5210 #else
5211 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5212 does nothing. There is no need to call it. Do nothing. */
5213 return const0_rtx;
5214 #endif /* CLEAR_INSN_CACHE */
5217 /* We have a "clear_cache" insn, and it will handle everything. */
5218 tree begin, end;
5219 rtx begin_rtx, end_rtx;
5221 /* We must not expand to a library call. If we did, any
5222 fallback library function in libgcc that might contain a call to
5223 __builtin___clear_cache() would recurse infinitely. */
5224 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5226 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5227 return const0_rtx;
5230 if (targetm.have_clear_cache ())
5232 struct expand_operand ops[2];
5234 begin = CALL_EXPR_ARG (exp, 0);
5235 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5237 end = CALL_EXPR_ARG (exp, 1);
5238 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5240 create_address_operand (&ops[0], begin_rtx);
5241 create_address_operand (&ops[1], end_rtx);
5242 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5243 return const0_rtx;
5245 return const0_rtx;
5248 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5250 static rtx
5251 round_trampoline_addr (rtx tramp)
5253 rtx temp, addend, mask;
5255 /* If we don't need too much alignment, we'll have been guaranteed
5256 proper alignment by get_trampoline_type. */
5257 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5258 return tramp;
5260 /* Round address up to desired boundary. */
5261 temp = gen_reg_rtx (Pmode);
5262 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5263 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5265 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5266 temp, 0, OPTAB_LIB_WIDEN);
5267 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5268 temp, 0, OPTAB_LIB_WIDEN);
5270 return tramp;
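The same round-up computation in plain C, assuming ALIGN is a power of two as TRAMPOLINE_ALIGNMENT always is:

#include <stdint.h>

static uintptr_t
round_up (uintptr_t addr, uintptr_t align)
{
  /* addend = align - 1, mask = -align, exactly as generated above. */
  return (addr + (align - 1)) & -align;
}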
5273 static rtx
5274 expand_builtin_init_trampoline (tree exp, bool onstack)
5276 tree t_tramp, t_func, t_chain;
5277 rtx m_tramp, r_tramp, r_chain, tmp;
5279 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5280 POINTER_TYPE, VOID_TYPE))
5281 return NULL_RTX;
5283 t_tramp = CALL_EXPR_ARG (exp, 0);
5284 t_func = CALL_EXPR_ARG (exp, 1);
5285 t_chain = CALL_EXPR_ARG (exp, 2);
5287 r_tramp = expand_normal (t_tramp);
5288 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5289 MEM_NOTRAP_P (m_tramp) = 1;
5291 /* If ONSTACK, the TRAMP argument should be the address of a field
5292 within the local function's FRAME decl. Either way, let's see if
5293 we can fill in the MEM_ATTRs for this memory. */
5294 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5295 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5297 /* Creator of a heap trampoline is responsible for making sure the
5298 address is aligned to at least STACK_BOUNDARY. Normally malloc
5299 will ensure this anyhow. */
5300 tmp = round_trampoline_addr (r_tramp);
5301 if (tmp != r_tramp)
5303 m_tramp = change_address (m_tramp, BLKmode, tmp);
5304 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5305 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5308 /* The FUNC argument should be the address of the nested function.
5309 Extract the actual function decl to pass to the hook. */
5310 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5311 t_func = TREE_OPERAND (t_func, 0);
5312 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5314 r_chain = expand_normal (t_chain);
5316 /* Generate insns to initialize the trampoline. */
5317 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5319 if (onstack)
5321 trampolines_created = 1;
5323 if (targetm.calls.custom_function_descriptors != 0)
5324 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5325 "trampoline generated for nested function %qD", t_func);
5328 return const0_rtx;
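A sketch of the source construct that reaches this expander (GNU C nested functions; the names are illustrative). Taking the address of a nested function that uses the enclosing frame forces a trampoline:

static int
apply (int (*fn) (int), int x)
{
  return fn (x);
}

int
outer (int bias)
{
  int add_bias (int v) { return v + bias; }   /* uses the static chain */
  return apply (add_bias, 2);                 /* address taken: trampoline */
}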
5331 static rtx
5332 expand_builtin_adjust_trampoline (tree exp)
5334 rtx tramp;
5336 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5337 return NULL_RTX;
5339 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5340 tramp = round_trampoline_addr (tramp);
5341 if (targetm.calls.trampoline_adjust_address)
5342 tramp = targetm.calls.trampoline_adjust_address (tramp);
5344 return tramp;
5347 /* Expand a call to the builtin descriptor initialization routine.
5348 A descriptor is made up of a couple of pointers to the static
5349 chain and the code entry in this order. */
5351 static rtx
5352 expand_builtin_init_descriptor (tree exp)
5354 tree t_descr, t_func, t_chain;
5355 rtx m_descr, r_descr, r_func, r_chain;
5357 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5358 VOID_TYPE))
5359 return NULL_RTX;
5361 t_descr = CALL_EXPR_ARG (exp, 0);
5362 t_func = CALL_EXPR_ARG (exp, 1);
5363 t_chain = CALL_EXPR_ARG (exp, 2);
5365 r_descr = expand_normal (t_descr);
5366 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5367 MEM_NOTRAP_P (m_descr) = 1;
5369 r_func = expand_normal (t_func);
5370 r_chain = expand_normal (t_chain);
5372 /* Generate insns to initialize the descriptor. */
5373 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5374 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5375 POINTER_SIZE / BITS_PER_UNIT), r_func);
5377 return const0_rtx;
5380 /* Expand a call to the builtin descriptor adjustment routine. */
5382 static rtx
5383 expand_builtin_adjust_descriptor (tree exp)
5385 rtx tramp;
5387 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5388 return NULL_RTX;
5390 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5392 /* Unalign the descriptor to allow runtime identification. */
5393 tramp = plus_constant (ptr_mode, tramp,
5394 targetm.calls.custom_function_descriptors);
5396 return force_operand (tramp, NULL_RTX);
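A layout sketch matching the two emit_move_insn calls in the initializer above, assuming POINTER_SIZE matches the width of void *:

struct descriptor
{
  void *chain;   /* static chain value, stored at offset 0 */
  void *entry;   /* code address, stored in the next pointer-sized slot */
};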
5399 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5400 function. The function first checks whether the back end provides
5401 an insn to implement signbit for the respective mode. If not, it
5402 checks whether the floating point format of the value is such that
5403 the sign bit can be extracted. If that is not the case, error out.
5404 EXP is the expression that is a call to the builtin function; if
5405 convenient, the result should be placed in TARGET. */
5406 static rtx
5407 expand_builtin_signbit (tree exp, rtx target)
5409 const struct real_format *fmt;
5410 scalar_float_mode fmode;
5411 scalar_int_mode rmode, imode;
5412 tree arg;
5413 int word, bitpos;
5414 enum insn_code icode;
5415 rtx temp;
5416 location_t loc = EXPR_LOCATION (exp);
5418 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5419 return NULL_RTX;
5421 arg = CALL_EXPR_ARG (exp, 0);
5422 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5423 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5424 fmt = REAL_MODE_FORMAT (fmode);
5426 arg = builtin_save_expr (arg);
5428 /* Expand the argument yielding an RTX expression. */
5429 temp = expand_normal (arg);
5431 /* Check if the back end provides an insn that handles signbit for the
5432 argument's mode. */
5433 icode = optab_handler (signbit_optab, fmode);
5434 if (icode != CODE_FOR_nothing)
5436 rtx_insn *last = get_last_insn ();
5437 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5438 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5439 return target;
5440 delete_insns_since (last);
5443 /* For floating point formats without a sign bit, implement signbit
5444 as "ARG < 0.0". */
5445 bitpos = fmt->signbit_ro;
5446 if (bitpos < 0)
5448 /* But we can't do this if the format supports signed zero. */
5449 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5451 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5452 build_real (TREE_TYPE (arg), dconst0));
5453 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5456 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5458 imode = int_mode_for_mode (fmode).require ();
5459 temp = gen_lowpart (imode, temp);
5461 else
5463 imode = word_mode;
5464 /* Handle targets with different FP word orders. */
5465 if (FLOAT_WORDS_BIG_ENDIAN)
5466 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5467 else
5468 word = bitpos / BITS_PER_WORD;
5469 temp = operand_subword_force (temp, word, fmode);
5470 bitpos = bitpos % BITS_PER_WORD;
5473 /* Force the intermediate word_mode (or narrower) result into a
5474 register. This avoids attempting to create paradoxical SUBREGs
5475 of floating point modes below. */
5476 temp = force_reg (imode, temp);
5478 /* If the bitpos is within the "result mode" lowpart, the operation
5479 can be implemented with a single bitwise AND. Otherwise, we need
5480 a right shift and an AND. */
5482 if (bitpos < GET_MODE_BITSIZE (rmode))
5484 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5486 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5487 temp = gen_lowpart (rmode, temp);
5488 temp = expand_binop (rmode, and_optab, temp,
5489 immed_wide_int_const (mask, rmode),
5490 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5492 else
5494 /* Perform a logical right shift to place the signbit in the least
5495 significant bit, then truncate the result to the desired mode
5496 and mask just this bit. */
5497 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5498 temp = gen_lowpart (rmode, temp);
5499 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5500 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5503 return temp;
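The shift-and-mask fallback in plain C for IEEE binary32 (an illustrative helper, assuming a 32-bit float): the sign occupies bit 31, so signbit reduces to a mask once the representation is reinterpreted:

#include <stdint.h>
#include <string.h>

static int
signbit_binary32 (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);   /* reinterpret without aliasing UB */
  return (bits >> 31) & 1;           /* also correct for -0.0 and NaNs */
}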
5506 /* Expand fork or exec calls. TARGET is the desired target of the
5507 call. EXP is the call. FN is the
5508 declaration of the actual function. IGNORE is nonzero if the
5509 value is to be ignored. */
5511 static rtx
5512 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5514 tree id, decl;
5515 tree call;
5517 /* If we are not profiling, just call the function. */
5518 if (!profile_arc_flag)
5519 return NULL_RTX;
5521 /* Otherwise call the wrapper. This should be equivalent for the rest of
5522 the compiler, so the code does not diverge, and the wrapper may run the
5523 code necessary to keep the profiling sane. */
5525 switch (DECL_FUNCTION_CODE (fn))
5527 case BUILT_IN_FORK:
5528 id = get_identifier ("__gcov_fork");
5529 break;
5531 case BUILT_IN_EXECL:
5532 id = get_identifier ("__gcov_execl");
5533 break;
5535 case BUILT_IN_EXECV:
5536 id = get_identifier ("__gcov_execv");
5537 break;
5539 case BUILT_IN_EXECLP:
5540 id = get_identifier ("__gcov_execlp");
5541 break;
5543 case BUILT_IN_EXECLE:
5544 id = get_identifier ("__gcov_execle");
5545 break;
5547 case BUILT_IN_EXECVP:
5548 id = get_identifier ("__gcov_execvp");
5549 break;
5551 case BUILT_IN_EXECVE:
5552 id = get_identifier ("__gcov_execve");
5553 break;
5555 default:
5556 gcc_unreachable ();
5559 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5560 FUNCTION_DECL, id, TREE_TYPE (fn));
5561 DECL_EXTERNAL (decl) = 1;
5562 TREE_PUBLIC (decl) = 1;
5563 DECL_ARTIFICIAL (decl) = 1;
5564 TREE_NOTHROW (decl) = 1;
5565 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5566 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5567 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5568 return expand_call (call, target, ignore);
5573 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5574 the pointer in these functions is void*, the tree optimizers may remove
5575 casts. The mode computed in expand_builtin isn't reliable either, due
5576 to __sync_bool_compare_and_swap.
5578 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5579 group of builtins. This gives us log2 of the mode size. */
5581 static inline machine_mode
5582 get_builtin_sync_mode (int fcode_diff)
5584 /* The size is not negotiable, so ask not to get BLKmode in return
5585 if the target indicates that a smaller size would be better. */
5586 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
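An illustration of the log2 encoding, assuming 8-bit units: the _1/_2/_4/_8/_16 builtins are defined consecutively, so the offset from the _1 entry is log2 of the access size:

static unsigned
sync_mode_bits (int fcode_diff)
{
  /* e.g. the _4 variant sits at offset 2, and 8 << 2 == 32 bits. */
  return 8u << fcode_diff;
}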
5589 /* Expand the memory expression LOC and return the appropriate memory operand
5590 for the builtin_sync operations. */
5592 static rtx
5593 get_builtin_sync_mem (tree loc, machine_mode mode)
5595 rtx addr, mem;
5597 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5598 addr = convert_memory_address (Pmode, addr);
5600 /* Note that we explicitly do not want any alias information for this
5601 memory, so that we kill all other live memories. Otherwise we don't
5602 satisfy the full barrier semantics of the intrinsic. */
5603 mem = validize_mem (gen_rtx_MEM (mode, addr));
5605 /* The alignment needs to be at least according to that of the mode. */
5606 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5607 get_pointer_alignment (loc)));
5608 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5609 MEM_VOLATILE_P (mem) = 1;
5611 return mem;
5614 /* Make sure an argument is in the right mode.
5615 EXP is the tree argument.
5616 MODE is the mode it should be in. */
5618 static rtx
5619 expand_expr_force_mode (tree exp, machine_mode mode)
5621 rtx val;
5622 machine_mode old_mode;
5624 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5625 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5626 of CONST_INTs, where we know the old_mode only from the call argument. */
5628 old_mode = GET_MODE (val);
5629 if (old_mode == VOIDmode)
5630 old_mode = TYPE_MODE (TREE_TYPE (exp));
5631 val = convert_modes (mode, old_mode, val, 1);
5632 return val;
5636 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5637 EXP is the CALL_EXPR. CODE is the rtx code
5638 that corresponds to the arithmetic or logical operation from the name;
5639 an exception here is that NOT actually means NAND. TARGET is an optional
5640 place for us to store the results; AFTER is true if this is the
5641 fetch_and_xxx form. */
5643 static rtx
5644 expand_builtin_sync_operation (machine_mode mode, tree exp,
5645 enum rtx_code code, bool after,
5646 rtx target)
5648 rtx val, mem;
5649 location_t loc = EXPR_LOCATION (exp);
5651 if (code == NOT && warn_sync_nand)
5653 tree fndecl = get_callee_fndecl (exp);
5654 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5656 static bool warned_f_a_n, warned_n_a_f;
5658 switch (fcode)
5660 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5661 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5662 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5663 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5664 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5665 if (warned_f_a_n)
5666 break;
5668 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5669 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5670 warned_f_a_n = true;
5671 break;
5673 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5674 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5675 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5676 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5677 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5678 if (warned_n_a_f)
5679 break;
5681 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5682 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5683 warned_n_a_f = true;
5684 break;
5686 default:
5687 gcc_unreachable ();
5691 /* Expand the operands. */
5692 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5693 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5695 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5696 after);
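Reference semantics for the post-4.4 NAND case the warning above is about, as a non-atomic sketch (the helper name is an assumption):

static unsigned
fetch_and_nand_model (unsigned *p, unsigned val)
{
  unsigned old = *p;    /* __sync_fetch_and_nand returns the old value */
  *p = ~(old & val);    /* NAND: NOT of the AND, per GCC >= 4.4 */
  return old;
}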
5699 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5700 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5701 true if this is the boolean form. TARGET is a place for us to store the
5702 results; this is NOT optional if IS_BOOL is true. */
5704 static rtx
5705 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5706 bool is_bool, rtx target)
5708 rtx old_val, new_val, mem;
5709 rtx *pbool, *poval;
5711 /* Expand the operands. */
5712 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5713 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5714 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5716 pbool = poval = NULL;
5717 if (target != const0_rtx)
5719 if (is_bool)
5720 pbool = &target;
5721 else
5722 poval = &target;
5724 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5725 false, MEMMODEL_SYNC_SEQ_CST,
5726 MEMMODEL_SYNC_SEQ_CST))
5727 return NULL_RTX;
5729 return target;
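The typical user-level pattern the two forms serve (an illustrative helper); the val form returns the observed memory contents, while the bool form returns only the success flag:

static void
atomic_inc (unsigned *counter)
{
  unsigned old, seen = *counter;
  do
    {
      old = seen;
      seen = __sync_val_compare_and_swap (counter, old, old + 1);
    }
  while (seen != old);
}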
5732 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5733 general form is actually an atomic exchange, and some targets only
5734 support a reduced form with the second argument being a constant 1.
5735 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5736 the results. */
5738 static rtx
5739 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5740 rtx target)
5742 rtx val, mem;
5744 /* Expand the operands. */
5745 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5746 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5748 return expand_sync_lock_test_and_set (target, mem, val);
5751 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5753 static void
5754 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5756 rtx mem;
5758 /* Expand the operands. */
5759 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5761 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
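The classic pairing of these two builtins, sketched as a spinlock (names are illustrative); test_and_set stores the constant 1 in the reduced form and returns the previous contents:

static volatile int lock_word;

static void
lock_acquire (void)
{
  while (__sync_lock_test_and_set (&lock_word, 1))
    ;                                /* spin until the old value was 0 */
}

static void
lock_release (void)
{
  __sync_lock_release (&lock_word);  /* release-ordered store of 0 */
}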
5764 /* Given an integer representing an ``enum memmodel'', verify its
5765 correctness and return the memory model enum. */
5767 static enum memmodel
5768 get_memmodel (tree exp)
5770 rtx op;
5771 unsigned HOST_WIDE_INT val;
5772 source_location loc
5773 = expansion_point_location_if_in_system_header (input_location);
5775 /* If the parameter is not a constant, it's a run time value so we'll just
5776 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5777 if (TREE_CODE (exp) != INTEGER_CST)
5778 return MEMMODEL_SEQ_CST;
5780 op = expand_normal (exp);
5782 val = INTVAL (op);
5783 if (targetm.memmodel_check)
5784 val = targetm.memmodel_check (val);
5785 else if (val & ~MEMMODEL_MASK)
5787 warning_at (loc, OPT_Winvalid_memory_model,
5788 "unknown architecture specifier in memory model to builtin");
5789 return MEMMODEL_SEQ_CST;
5792 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
5793 if (memmodel_base (val) >= MEMMODEL_LAST)
5795 warning_at (loc, OPT_Winvalid_memory_model,
5796 "invalid memory model argument to builtin");
5797 return MEMMODEL_SEQ_CST;
5800 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5801 be conservative and promote consume to acquire. */
5802 if (val == MEMMODEL_CONSUME)
5803 val = MEMMODEL_ACQUIRE;
5805 return (enum memmodel) val;
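The effect of the consume-to-acquire promotion at a call site (an illustrative helper):

static int
load_consume (int *p)
{
  /* Compiled as if __ATOMIC_ACQUIRE had been written (PR 59448). */
  return __atomic_load_n (p, __ATOMIC_CONSUME);
}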
5808 /* Expand the __atomic_exchange intrinsic:
5809 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5810 EXP is the CALL_EXPR.
5811 TARGET is an optional place for us to store the results. */
5813 static rtx
5814 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5816 rtx val, mem;
5817 enum memmodel model;
5819 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5821 if (!flag_inline_atomics)
5822 return NULL_RTX;
5824 /* Expand the operands. */
5825 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5826 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5828 return expand_atomic_exchange (target, mem, val, model);
5831 /* Expand the __atomic_compare_exchange intrinsic:
5832 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5833 TYPE desired, BOOL weak,
5834 enum memmodel success,
5835 enum memmodel failure)
5836 EXP is the CALL_EXPR.
5837 TARGET is an optional place for us to store the results. */
5839 static rtx
5840 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5841 rtx target)
5843 rtx expect, desired, mem, oldval;
5844 rtx_code_label *label;
5845 enum memmodel success, failure;
5846 tree weak;
5847 bool is_weak;
5848 source_location loc
5849 = expansion_point_location_if_in_system_header (input_location);
5851 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5852 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5854 if (failure > success)
5856 warning_at (loc, OPT_Winvalid_memory_model,
5857 "failure memory model cannot be stronger than success "
5858 "memory model for %<__atomic_compare_exchange%>");
5859 success = MEMMODEL_SEQ_CST;
5862 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5864 warning_at (loc, OPT_Winvalid_memory_model,
5865 "invalid failure memory model for "
5866 "%<__atomic_compare_exchange%>");
5867 failure = MEMMODEL_SEQ_CST;
5868 success = MEMMODEL_SEQ_CST;
5872 if (!flag_inline_atomics)
5873 return NULL_RTX;
5875 /* Expand the operands. */
5876 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5878 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5879 expect = convert_memory_address (Pmode, expect);
5880 expect = gen_rtx_MEM (mode, expect);
5881 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5883 weak = CALL_EXPR_ARG (exp, 3);
5884 is_weak = false;
5885 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5886 is_weak = true;
5888 if (target == const0_rtx)
5889 target = NULL;
5891 /* Lest the rtl backend create a race condition with an improper store
5892 to memory, always create a new pseudo for OLDVAL. */
5893 oldval = NULL;
5895 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5896 is_weak, success, failure))
5897 return NULL_RTX;
5899 /* Conditionally store back to EXPECT, lest we create a race condition
5900 with an improper store to memory. */
5901 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5902 the normal case where EXPECT is totally private, i.e. a register. At
5903 which point the store can be unconditional. */
5904 label = gen_label_rtx ();
5905 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5906 GET_MODE (target), 1, label);
5907 emit_move_insn (expect, oldval);
5908 emit_label (label);
5910 return target;
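The user-level view of the conditional store-back above (an illustrative helper): on failure the builtin rewrites *EXPECTED with the observed value, so the retry loop needs no reload:

static void
atomic_add_one (int *obj)
{
  int expected = __atomic_load_n (obj, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (obj, &expected, expected + 1,
                                       0 /* strong */,
                                       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
    ;   /* EXPECTED already holds the current value; just retry */
}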
5913 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5914 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5915 call. The weak parameter must be dropped to match the expected parameter
5916 list and the expected argument changed from value to pointer to memory
5917 slot. */
5919 static void
5920 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5922 unsigned int z;
5923 vec<tree, va_gc> *vec;
5925 vec_alloc (vec, 5);
5926 vec->quick_push (gimple_call_arg (call, 0));
5927 tree expected = gimple_call_arg (call, 1);
5928 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5929 TREE_TYPE (expected));
5930 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5931 if (expd != x)
5932 emit_move_insn (x, expd);
5933 tree v = make_tree (TREE_TYPE (expected), x);
5934 vec->quick_push (build1 (ADDR_EXPR,
5935 build_pointer_type (TREE_TYPE (expected)), v));
5936 vec->quick_push (gimple_call_arg (call, 2));
5937 /* Skip the boolean weak parameter. */
5938 for (z = 4; z < 6; z++)
5939 vec->quick_push (gimple_call_arg (call, z));
5940 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5941 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5942 gcc_assert (bytes_log2 < 5);
5943 built_in_function fncode
5944 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5945 + bytes_log2);
5946 tree fndecl = builtin_decl_explicit (fncode);
5947 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5948 fndecl);
5949 tree exp = build_call_vec (boolean_type_node, fn, vec);
5950 tree lhs = gimple_call_lhs (call);
5951 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5952 if (lhs)
5954 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5955 if (GET_MODE (boolret) != mode)
5956 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5957 x = force_reg (mode, x);
5958 write_complex_part (target, boolret, true);
5959 write_complex_part (target, x, false);
5963 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5965 void
5966 expand_ifn_atomic_compare_exchange (gcall *call)
5968 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5969 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5970 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5971 rtx expect, desired, mem, oldval, boolret;
5972 enum memmodel success, failure;
5973 tree lhs;
5974 bool is_weak;
5975 source_location loc
5976 = expansion_point_location_if_in_system_header (gimple_location (call));
5978 success = get_memmodel (gimple_call_arg (call, 4));
5979 failure = get_memmodel (gimple_call_arg (call, 5));
5981 if (failure > success)
5983 warning_at (loc, OPT_Winvalid_memory_model,
5984 "failure memory model cannot be stronger than success "
5985 "memory model for %<__atomic_compare_exchange%>");
5986 success = MEMMODEL_SEQ_CST;
5989 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5991 warning_at (loc, OPT_Winvalid_memory_model,
5992 "invalid failure memory model for "
5993 "%<__atomic_compare_exchange%>");
5994 failure = MEMMODEL_SEQ_CST;
5995 success = MEMMODEL_SEQ_CST;
5998 if (!flag_inline_atomics)
6000 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6001 return;
6004 /* Expand the operands. */
6005 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6007 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6008 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6010 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6012 boolret = NULL;
6013 oldval = NULL;
6015 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6016 is_weak, success, failure))
6018 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6019 return;
6022 lhs = gimple_call_lhs (call);
6023 if (lhs)
6025 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6026 if (GET_MODE (boolret) != mode)
6027 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6028 write_complex_part (target, boolret, true);
6029 write_complex_part (target, oldval, false);
6033 /* Expand the __atomic_load intrinsic:
6034 TYPE __atomic_load (TYPE *object, enum memmodel)
6035 EXP is the CALL_EXPR.
6036 TARGET is an optional place for us to store the results. */
6038 static rtx
6039 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6041 rtx mem;
6042 enum memmodel model;
6044 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6045 if (is_mm_release (model) || is_mm_acq_rel (model))
6047 source_location loc
6048 = expansion_point_location_if_in_system_header (input_location);
6049 warning_at (loc, OPT_Winvalid_memory_model,
6050 "invalid memory model for %<__atomic_load%>");
6051 model = MEMMODEL_SEQ_CST;
6054 if (!flag_inline_atomics)
6055 return NULL_RTX;
6057 /* Expand the operand. */
6058 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6060 return expand_atomic_load (target, mem, model);
6064 /* Expand the __atomic_store intrinsic:
6065 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6066 EXP is the CALL_EXPR.
6067 TARGET is an optional place for us to store the results. */
6069 static rtx
6070 expand_builtin_atomic_store (machine_mode mode, tree exp)
6072 rtx mem, val;
6073 enum memmodel model;
6075 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6076 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6077 || is_mm_release (model)))
6079 source_location loc
6080 = expansion_point_location_if_in_system_header (input_location);
6081 warning_at (loc, OPT_Winvalid_memory_model,
6082 "invalid memory model for %<__atomic_store%>");
6083 model = MEMMODEL_SEQ_CST;
6086 if (!flag_inline_atomics)
6087 return NULL_RTX;
6089 /* Expand the operands. */
6090 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6091 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6093 return expand_atomic_store (mem, val, model, false);
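Call sites using the model sets the two checks above accept (illustrative helpers): loads reject release-class models, stores reject acquire-class ones:

static void
publish (int *flag)
{
  __atomic_store_n (flag, 1, __ATOMIC_RELEASE);    /* relaxed/release/seq_cst */
}

static int
wait_ready (int *flag)
{
  return __atomic_load_n (flag, __ATOMIC_ACQUIRE); /* relaxed/consume/acquire/seq_cst */
}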
6096 /* Expand the __atomic_fetch_XXX intrinsic:
6097 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6098 EXP is the CALL_EXPR.
6099 TARGET is an optional place for us to store the results.
6100 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6101 FETCH_AFTER is true if returning the result of the operation.
6102 FETCH_AFTER is false if returning the value before the operation.
6103 IGNORE is true if the result is not used.
6104 EXT_CALL is the correct builtin for an external call if this cannot be
6105 resolved to an instruction sequence. */
6107 static rtx
6108 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6109 enum rtx_code code, bool fetch_after,
6110 bool ignore, enum built_in_function ext_call)
6112 rtx val, mem, ret;
6113 enum memmodel model;
6114 tree fndecl;
6115 tree addr;
6117 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6119 /* Expand the operands. */
6120 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6121 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6123 /* Only try generating instructions if inlining is turned on. */
6124 if (flag_inline_atomics)
6126 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6127 if (ret)
6128 return ret;
6131 /* Return if a different routine isn't needed for the library call. */
6132 if (ext_call == BUILT_IN_NONE)
6133 return NULL_RTX;
6135 /* Change the call to the specified function. */
6136 fndecl = get_callee_fndecl (exp);
6137 addr = CALL_EXPR_FN (exp);
6138 STRIP_NOPS (addr);
6140 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6141 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6143 /* If we will emit code after the call, the call cannot be a tail call.
6144 If it is emitted as a tail call, a barrier is emitted after it, and
6145 then all trailing code is removed. */
6146 if (!ignore)
6147 CALL_EXPR_TAILCALL (exp) = 0;
6149 /* Expand the call here so we can emit trailing code. */
6150 ret = expand_call (exp, target, ignore);
6152 /* Replace the original function just in case it matters. */
6153 TREE_OPERAND (addr, 0) = fndecl;
6155 /* Then issue the arithmetic correction to return the right result. */
6156 if (!ignore)
6158 if (code == NOT)
6160 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6161 OPTAB_LIB_WIDEN);
6162 ret = expand_simple_unop (mode, NOT, ret, target, true);
6164 else
6165 ret = expand_simple_binop (mode, code, ret, val, target, true,
6166 OPTAB_LIB_WIDEN);
6168 return ret;
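The correction arithmetic in plain C (helper names are assumptions): RET is the library call's fetch-before value, and the op-and-fetch result is recovered by reapplying the operation, with NAND as the special case:

static unsigned
add_and_fetch_from_ret (unsigned ret, unsigned val)
{
  return ret + val;       /* generic case: redo CODE on the old value */
}

static unsigned
nand_and_fetch_from_ret (unsigned ret, unsigned val)
{
  return ~(ret & val);    /* code == NOT: the AND-then-NOT pair above */
}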
6171 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6173 void
6174 expand_ifn_atomic_bit_test_and (gcall *call)
6176 tree ptr = gimple_call_arg (call, 0);
6177 tree bit = gimple_call_arg (call, 1);
6178 tree flag = gimple_call_arg (call, 2);
6179 tree lhs = gimple_call_lhs (call);
6180 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6181 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6182 enum rtx_code code;
6183 optab optab;
6184 struct expand_operand ops[5];
6186 gcc_assert (flag_inline_atomics);
6188 if (gimple_call_num_args (call) == 4)
6189 model = get_memmodel (gimple_call_arg (call, 3));
6191 rtx mem = get_builtin_sync_mem (ptr, mode);
6192 rtx val = expand_expr_force_mode (bit, mode);
6194 switch (gimple_call_internal_fn (call))
6196 case IFN_ATOMIC_BIT_TEST_AND_SET:
6197 code = IOR;
6198 optab = atomic_bit_test_and_set_optab;
6199 break;
6200 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6201 code = XOR;
6202 optab = atomic_bit_test_and_complement_optab;
6203 break;
6204 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6205 code = AND;
6206 optab = atomic_bit_test_and_reset_optab;
6207 break;
6208 default:
6209 gcc_unreachable ();
6212 if (lhs == NULL_TREE)
6214 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6215 val, NULL_RTX, true, OPTAB_DIRECT);
6216 if (code == AND)
6217 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6218 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6219 return;
6222 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6223 enum insn_code icode = direct_optab_handler (optab, mode);
6224 gcc_assert (icode != CODE_FOR_nothing);
6225 create_output_operand (&ops[0], target, mode);
6226 create_fixed_operand (&ops[1], mem);
6227 create_convert_operand_to (&ops[2], val, mode, true);
6228 create_integer_operand (&ops[3], model);
6229 create_integer_operand (&ops[4], integer_onep (flag));
6230 if (maybe_expand_insn (icode, 5, ops))
6231 return;
6233 rtx bitval = val;
6234 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6235 val, NULL_RTX, true, OPTAB_DIRECT);
6236 rtx maskval = val;
6237 if (code == AND)
6238 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6239 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6240 code, model, false);
6241 if (integer_onep (flag))
6243 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6244 NULL_RTX, true, OPTAB_DIRECT);
6245 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6246 true, OPTAB_DIRECT);
6248 else
6249 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6250 OPTAB_DIRECT);
6251 if (result != target)
6252 emit_move_insn (target, result);
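Roughly the source pattern that the optimizers fold into IFN_ATOMIC_BIT_TEST_AND_SET (an illustrative sketch):

static int
set_bit_and_test (unsigned *word, unsigned bit)
{
  unsigned mask = 1u << bit;
  return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}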
6255 /* Expand an atomic clear operation.
6256 void _atomic_clear (BOOL *obj, enum memmodel)
6257 EXP is the call expression. */
6259 static rtx
6260 expand_builtin_atomic_clear (tree exp)
6262 machine_mode mode;
6263 rtx mem, ret;
6264 enum memmodel model;
6266 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6267 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6268 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6270 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6272 source_location loc
6273 = expansion_point_location_if_in_system_header (input_location);
6274 warning_at (loc, OPT_Winvalid_memory_model,
6275 "invalid memory model for %<__atomic_store%>");
6276 model = MEMMODEL_SEQ_CST;
6279 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6280 Failing that, a store is issued by __atomic_store. The only way this can
6281 fail is if the bool type is larger than a word size. Unlikely, but
6282 handle it anyway for completeness. Assume a single threaded model since
6283 there is no atomic support in this case, and no barriers are required. */
6284 ret = expand_atomic_store (mem, const0_rtx, model, true);
6285 if (!ret)
6286 emit_move_insn (mem, const0_rtx);
6287 return const0_rtx;
6290 /* Expand an atomic test_and_set operation.
6291 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6292 EXP is the call expression. */
6294 static rtx
6295 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6297 rtx mem;
6298 enum memmodel model;
6299 machine_mode mode;
6301 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6302 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6303 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6305 return expand_atomic_test_and_set (target, mem, model);
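A usage sketch for the bool-sized pair handled by the two expanders above (names are illustrative):

static _Bool busy;

static _Bool
try_grab (void)
{
  return __atomic_test_and_set (&busy, __ATOMIC_ACQUIRE);  /* old value */
}

static void
drop (void)
{
  __atomic_clear (&busy, __ATOMIC_RELEASE);
}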
6309 /* Return true if the (optional) argument ARG1 of size ARG0 is always lock free on
6310 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6312 static tree
6313 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6315 int size;
6316 machine_mode mode;
6317 unsigned int mode_align, type_align;
6319 if (TREE_CODE (arg0) != INTEGER_CST)
6320 return NULL_TREE;
6322 /* We need a corresponding integer mode for the access to be lock-free. */
6323 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6324 if (!int_mode_for_size (size, 0).exists (&mode))
6325 return boolean_false_node;
6327 mode_align = GET_MODE_ALIGNMENT (mode);
6329 if (TREE_CODE (arg1) == INTEGER_CST)
6331 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6333 /* Either this argument is null, or it's a fake pointer encoding
6334 the alignment of the object. */
6335 val = least_bit_hwi (val);
6336 val *= BITS_PER_UNIT;
6338 if (val == 0 || mode_align < val)
6339 type_align = mode_align;
6340 else
6341 type_align = val;
6343 else
6345 tree ttype = TREE_TYPE (arg1);
6347 /* This function is usually invoked and folded immediately by the front
6348 end before anything else has a chance to look at it. The pointer
6349 parameter at this point is usually cast to a void *, so check for that
6350 and look past the cast. */
6351 if (CONVERT_EXPR_P (arg1)
6352 && POINTER_TYPE_P (ttype)
6353 && VOID_TYPE_P (TREE_TYPE (ttype))
6354 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6355 arg1 = TREE_OPERAND (arg1, 0);
6357 ttype = TREE_TYPE (arg1);
6358 gcc_assert (POINTER_TYPE_P (ttype));
6360 /* Get the underlying type of the object. */
6361 ttype = TREE_TYPE (ttype);
6362 type_align = TYPE_ALIGN (ttype);
6365 /* If the object has smaller alignment, the lock free routines cannot
6366 be used. */
6367 if (type_align < mode_align)
6368 return boolean_false_node;
6370 /* Check if a compare_and_swap pattern exists for the mode which represents
6371 the required size. The pattern is not allowed to fail, so the existence
6372 of the pattern indicates support is present. Also require that an
6373 atomic load exists for the required size. */
6374 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6375 return boolean_true_node;
6376 else
6377 return boolean_false_node;
6380 /* Return true if the parameters to call EXP represent an object which will
6381 always generate lock free instructions. The first argument represents the
6382 size of the object, and the second parameter is a pointer to the object
6383 itself. If NULL is passed for the object, then the result is based on
6384 typical alignment for an object of the specified size. Otherwise return
6385 false. */
6387 static rtx
6388 expand_builtin_atomic_always_lock_free (tree exp)
6390 tree size;
6391 tree arg0 = CALL_EXPR_ARG (exp, 0);
6392 tree arg1 = CALL_EXPR_ARG (exp, 1);
6394 if (TREE_CODE (arg0) != INTEGER_CST)
6396 error ("non-constant argument 1 to __atomic_always_lock_free");
6397 return const0_rtx;
6400 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6401 if (size == boolean_true_node)
6402 return const1_rtx;
6403 return const0_rtx;
6406 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6407 is lock free on this architecture. */
6409 static tree
6410 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6412 if (!flag_inline_atomics)
6413 return NULL_TREE;
6415 /* If it isn't always lock free, don't generate a result. */
6416 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6417 return boolean_true_node;
6419 return NULL_TREE;
6422 /* Return true if the parameters to call EXP represent an object which will
6423 always generate lock free instructions. The first argument represents the
6424 size of the object, and the second parameter is a pointer to the object
6425 itself. If NULL is passed for the object, then the result is based on
6426 typical alignment for an object of the specified size. Otherwise return
6427 NULL. */
6429 static rtx
6430 expand_builtin_atomic_is_lock_free (tree exp)
6432 tree size;
6433 tree arg0 = CALL_EXPR_ARG (exp, 0);
6434 tree arg1 = CALL_EXPR_ARG (exp, 1);
6436 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6438 error ("non-integer argument 1 to __atomic_is_lock_free");
6439 return NULL_RTX;
6442 if (!flag_inline_atomics)
6443 return NULL_RTX;
6445 /* If the value is known at compile time, return the RTX for it. */
6446 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6447 if (size == boolean_true_node)
6448 return const1_rtx;
6450 return NULL_RTX;
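The compile-time/run-time split at a call site (an illustrative helper): the always_ form must fold to a constant, while is_lock_free may become a libatomic query when nothing can be proven:

static int
counter_is_lock_free (long *p)
{
  return __atomic_always_lock_free (sizeof *p, 0)   /* 0: typical alignment */
         && __atomic_is_lock_free (sizeof *p, p);
}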
6453 /* Expand the __atomic_thread_fence intrinsic:
6454 void __atomic_thread_fence (enum memmodel)
6455 EXP is the CALL_EXPR. */
6457 static void
6458 expand_builtin_atomic_thread_fence (tree exp)
6460 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6461 expand_mem_thread_fence (model);
6464 /* Expand the __atomic_signal_fence intrinsic:
6465 void __atomic_signal_fence (enum memmodel)
6466 EXP is the CALL_EXPR. */
6468 static void
6469 expand_builtin_atomic_signal_fence (tree exp)
6471 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6472 expand_mem_signal_fence (model);
6475 /* Expand the __sync_synchronize intrinsic. */
6477 static void
6478 expand_builtin_sync_synchronize (void)
6480 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
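The three fences side by side (an illustrative helper):

static void
fences (void)
{
  __atomic_thread_fence (__ATOMIC_ACQ_REL);   /* inter-thread ordering */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);   /* compiler-only barrier */
  __sync_synchronize ();                      /* legacy SEQ_CST fence */
}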
6483 static rtx
6484 expand_builtin_thread_pointer (tree exp, rtx target)
6486 enum insn_code icode;
6487 if (!validate_arglist (exp, VOID_TYPE))
6488 return const0_rtx;
6489 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6490 if (icode != CODE_FOR_nothing)
6492 struct expand_operand op;
6493 /* If the target is not suitable then create a new one. */
6494 if (target == NULL_RTX
6495 || !REG_P (target)
6496 || GET_MODE (target) != Pmode)
6497 target = gen_reg_rtx (Pmode);
6498 create_output_operand (&op, target, Pmode);
6499 expand_insn (icode, 1, &op);
6500 return target;
6502 error ("__builtin_thread_pointer is not supported on this target");
6503 return const0_rtx;
6506 static void
6507 expand_builtin_set_thread_pointer (tree exp)
6509 enum insn_code icode;
6510 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6511 return;
6512 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6513 if (icode != CODE_FOR_nothing)
6515 struct expand_operand op;
6516 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6517 Pmode, EXPAND_NORMAL);
6518 create_input_operand (&op, val, Pmode);
6519 expand_insn (icode, 1, &op);
6520 return;
6522 error ("__builtin_set_thread_pointer is not supported on this target");
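The user-visible face of the two expanders, sketched for a target that provides the optab (e.g. the TLS register on AArch64); elsewhere the errors above are emitted:

void *
thread_base (void)
{
  return __builtin_thread_pointer ();
}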
6526 /* Emit code to restore the current value of the stack pointer. */
6528 static void
6529 expand_stack_restore (tree var)
6531 rtx_insn *prev;
6532 rtx sa = expand_normal (var);
6534 sa = convert_memory_address (Pmode, sa);
6536 prev = get_last_insn ();
6537 emit_stack_restore (SAVE_BLOCK, sa);
6539 record_new_stack_level ();
6541 fixup_args_size_notes (prev, get_last_insn (), 0);
6544 /* Emit code to save the current value of the stack pointer. */
6546 static rtx
6547 expand_stack_save (void)
6549 rtx ret = NULL_RTX;
6551 emit_stack_save (SAVE_BLOCK, &ret);
6552 return ret;
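Conceptually these two expanders back the implicit save/restore around variable-length array scopes, sketched below (assuming n > 0; the function name is illustrative):

void
vla_scope (int n)
{
  /* Stack pointer saved on block entry ... */
  {
    char buf[n];        /* VLA allocation moves the stack pointer */
    buf[0] = 0;
  }
  /* ... and restored on block exit. */
}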
6555 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6557 static rtx
6558 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6560 const char *name;
6561 rtx fallback_retval;
6562 rtx_insn *(*gen_fn) (rtx, rtx);
6563 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6565 case BUILT_IN_GOACC_PARLEVEL_ID:
6566 name = "__builtin_goacc_parlevel_id";
6567 fallback_retval = const0_rtx;
6568 gen_fn = targetm.gen_oacc_dim_pos;
6569 break;
6570 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6571 name = "__builtin_goacc_parlevel_size";
6572 fallback_retval = const1_rtx;
6573 gen_fn = targetm.gen_oacc_dim_size;
6574 break;
6575 default:
6576 gcc_unreachable ();
6579 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6581 error ("%qs only supported in OpenACC code", name);
6582 return const0_rtx;
6585 tree arg = CALL_EXPR_ARG (exp, 0);
6586 if (TREE_CODE (arg) != INTEGER_CST)
6588 error ("non-constant argument 0 to %qs", name);
6589 return const0_rtx;
6592 int dim = TREE_INT_CST_LOW (arg);
6593 switch (dim)
6595 case GOMP_DIM_GANG:
6596 case GOMP_DIM_WORKER:
6597 case GOMP_DIM_VECTOR:
6598 break;
6599 default:
6600 error ("illegal argument 0 to %qs", name);
6601 return const0_rtx;
6604 if (ignore)
6605 return target;
6607 if (target == NULL_RTX)
6608 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6610 if (!targetm.have_oacc_dim_size ())
6612 emit_move_insn (target, fallback_retval);
6613 return target;
6616 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6617 emit_insn (gen_fn (reg, GEN_INT (dim)));
6618 if (reg != target)
6619 emit_move_insn (target, reg);
6621 return target;
6624 /* Expand an expression EXP that calls a built-in function,
6625 with result going to TARGET if that's convenient
6626 (and in mode MODE if that's convenient).
6627 SUBTARGET may be used as the target for computing one of EXP's operands.
6628 IGNORE is nonzero if the value is to be ignored. */
6631 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6632 int ignore)
6634 tree fndecl = get_callee_fndecl (exp);
6635 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6636 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6637 int flags;
6639 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6640 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6642 /* When ASan is enabled, we don't want to expand some memory/string
6643 builtins and rely on libsanitizer's hooks. This allows us to avoid
6644 redundant checks and be sure, that possible overflow will be detected
6645 by ASan. */
6647 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6648 return expand_call (exp, target, ignore);
6650 /* When not optimizing, generate calls to library functions for a certain
6651 set of builtins. */
6652 if (!optimize
6653 && !called_as_built_in (fndecl)
6654 && fcode != BUILT_IN_FORK
6655 && fcode != BUILT_IN_EXECL
6656 && fcode != BUILT_IN_EXECV
6657 && fcode != BUILT_IN_EXECLP
6658 && fcode != BUILT_IN_EXECLE
6659 && fcode != BUILT_IN_EXECVP
6660 && fcode != BUILT_IN_EXECVE
6661 && !ALLOCA_FUNCTION_CODE_P (fcode)
6662 && fcode != BUILT_IN_FREE)
6663 return expand_call (exp, target, ignore);
6665 /* The built-in function expanders test for target == const0_rtx
6666 to determine whether the function's result will be ignored. */
6667 if (ignore)
6668 target = const0_rtx;
6670 /* If the result of a pure or const built-in function is ignored, and
6671 none of its arguments are volatile, we can avoid expanding the
6672 built-in call and just evaluate the arguments for side-effects. */
6673 if (target == const0_rtx
6674 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6675 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6677 bool volatilep = false;
6678 tree arg;
6679 call_expr_arg_iterator iter;
6681 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6682 if (TREE_THIS_VOLATILE (arg))
6684 volatilep = true;
6685 break;
6688 if (! volatilep)
6690 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6691 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6692 return const0_rtx;
6696 switch (fcode)
6698 CASE_FLT_FN (BUILT_IN_FABS):
6699 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6700 case BUILT_IN_FABSD32:
6701 case BUILT_IN_FABSD64:
6702 case BUILT_IN_FABSD128:
6703 target = expand_builtin_fabs (exp, target, subtarget);
6704 if (target)
6705 return target;
6706 break;
6708 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6709 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6710 target = expand_builtin_copysign (exp, target, subtarget);
6711 if (target)
6712 return target;
6713 break;
6715 /* Just do a normal library call if we were unable to fold
6716 the values. */
6717 CASE_FLT_FN (BUILT_IN_CABS):
6718 break;
6720 CASE_FLT_FN (BUILT_IN_FMA):
6721 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6722 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6723 if (target)
6724 return target;
6725 break;
6727 CASE_FLT_FN (BUILT_IN_ILOGB):
6728 if (! flag_unsafe_math_optimizations)
6729 break;
6730 gcc_fallthrough ();
6731 CASE_FLT_FN (BUILT_IN_ISINF):
6732 CASE_FLT_FN (BUILT_IN_FINITE):
6733 case BUILT_IN_ISFINITE:
6734 case BUILT_IN_ISNORMAL:
6735 target = expand_builtin_interclass_mathfn (exp, target);
6736 if (target)
6737 return target;
6738 break;
6740 CASE_FLT_FN (BUILT_IN_ICEIL):
6741 CASE_FLT_FN (BUILT_IN_LCEIL):
6742 CASE_FLT_FN (BUILT_IN_LLCEIL):
6743 CASE_FLT_FN (BUILT_IN_LFLOOR):
6744 CASE_FLT_FN (BUILT_IN_IFLOOR):
6745 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6746 target = expand_builtin_int_roundingfn (exp, target);
6747 if (target)
6748 return target;
6749 break;
6751 CASE_FLT_FN (BUILT_IN_IRINT):
6752 CASE_FLT_FN (BUILT_IN_LRINT):
6753 CASE_FLT_FN (BUILT_IN_LLRINT):
6754 CASE_FLT_FN (BUILT_IN_IROUND):
6755 CASE_FLT_FN (BUILT_IN_LROUND):
6756 CASE_FLT_FN (BUILT_IN_LLROUND):
6757 target = expand_builtin_int_roundingfn_2 (exp, target);
6758 if (target)
6759 return target;
6760 break;
6762 CASE_FLT_FN (BUILT_IN_POWI):
6763 target = expand_builtin_powi (exp, target);
6764 if (target)
6765 return target;
6766 break;
6768 CASE_FLT_FN (BUILT_IN_CEXPI):
6769 target = expand_builtin_cexpi (exp, target);
6770 gcc_assert (target);
6771 return target;
6773 CASE_FLT_FN (BUILT_IN_SIN):
6774 CASE_FLT_FN (BUILT_IN_COS):
6775 if (! flag_unsafe_math_optimizations)
6776 break;
6777 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6778 if (target)
6779 return target;
6780 break;
6782 CASE_FLT_FN (BUILT_IN_SINCOS):
6783 if (! flag_unsafe_math_optimizations)
6784 break;
6785 target = expand_builtin_sincos (exp);
6786 if (target)
6787 return target;
6788 break;
6790 case BUILT_IN_APPLY_ARGS:
6791 return expand_builtin_apply_args ();
6793 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6794 FUNCTION with a copy of the parameters described by
6795 ARGUMENTS, and ARGSIZE. It returns a block of memory
6796 allocated on the stack into which is stored all the registers
6797 that might possibly be used for returning the result of a
6798 function. ARGUMENTS is the value returned by
6799 __builtin_apply_args. ARGSIZE is the number of bytes of
6800 arguments that must be copied. ??? How should this value be
6801 computed? We'll also need a safe worst case value for varargs
6802 functions. */
6803 case BUILT_IN_APPLY:
6804 if (!validate_arglist (exp, POINTER_TYPE,
6805 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6806 && !validate_arglist (exp, REFERENCE_TYPE,
6807 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6808 return const0_rtx;
6809 else
6811 rtx ops[3];
6813 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6814 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6815 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6817 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6820 /* __builtin_return (RESULT) causes the function to return the
6821 value described by RESULT. RESULT is address of the block of
6822 memory returned by __builtin_apply. */
6823 case BUILT_IN_RETURN:
6824 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6825 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6826 return const0_rtx;
6828 case BUILT_IN_SAVEREGS:
6829 return expand_builtin_saveregs ();
6831 case BUILT_IN_VA_ARG_PACK:
6832 /* All valid uses of __builtin_va_arg_pack () are removed during
6833 inlining. */
6834 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6835 return const0_rtx;
6837 case BUILT_IN_VA_ARG_PACK_LEN:
6838 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6839 inlining. */
6840 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6841 return const0_rtx;
6843 /* Return the address of the first anonymous stack arg. */
6844 case BUILT_IN_NEXT_ARG:
6845 if (fold_builtin_next_arg (exp, false))
6846 return const0_rtx;
6847 return expand_builtin_next_arg ();
6849 case BUILT_IN_CLEAR_CACHE:
6850 target = expand_builtin___clear_cache (exp);
6851 if (target)
6852 return target;
6853 break;
6855 case BUILT_IN_CLASSIFY_TYPE:
6856 return expand_builtin_classify_type (exp);
6858 case BUILT_IN_CONSTANT_P:
6859 return const0_rtx;
6861 case BUILT_IN_FRAME_ADDRESS:
6862 case BUILT_IN_RETURN_ADDRESS:
6863 return expand_builtin_frame_address (fndecl, exp);
6865 /* Returns the address of the area where the structure is returned.
6866 0 otherwise. */
6867 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6868 if (call_expr_nargs (exp) != 0
6869 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6870 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6871 return const0_rtx;
6872 else
6873 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6875 CASE_BUILT_IN_ALLOCA:
6876 target = expand_builtin_alloca (exp);
6877 if (target)
6878 return target;
6879 break;
6881 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6882 return expand_asan_emit_allocas_unpoison (exp);
6884 case BUILT_IN_STACK_SAVE:
6885 return expand_stack_save ();
6887 case BUILT_IN_STACK_RESTORE:
6888 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6889 return const0_rtx;
6891 case BUILT_IN_BSWAP16:
6892 case BUILT_IN_BSWAP32:
6893 case BUILT_IN_BSWAP64:
6894 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6895 if (target)
6896 return target;
6897 break;
6899 CASE_INT_FN (BUILT_IN_FFS):
6900 target = expand_builtin_unop (target_mode, exp, target,
6901 subtarget, ffs_optab);
6902 if (target)
6903 return target;
6904 break;
6906 CASE_INT_FN (BUILT_IN_CLZ):
6907 target = expand_builtin_unop (target_mode, exp, target,
6908 subtarget, clz_optab);
6909 if (target)
6910 return target;
6911 break;
6913 CASE_INT_FN (BUILT_IN_CTZ):
6914 target = expand_builtin_unop (target_mode, exp, target,
6915 subtarget, ctz_optab);
6916 if (target)
6917 return target;
6918 break;
6920 CASE_INT_FN (BUILT_IN_CLRSB):
6921 target = expand_builtin_unop (target_mode, exp, target,
6922 subtarget, clrsb_optab);
6923 if (target)
6924 return target;
6925 break;
6927 CASE_INT_FN (BUILT_IN_POPCOUNT):
6928 target = expand_builtin_unop (target_mode, exp, target,
6929 subtarget, popcount_optab);
6930 if (target)
6931 return target;
6932 break;
6934 CASE_INT_FN (BUILT_IN_PARITY):
6935 target = expand_builtin_unop (target_mode, exp, target,
6936 subtarget, parity_optab);
6937 if (target)
6938 return target;
6939 break;
6941 case BUILT_IN_STRLEN:
6942 target = expand_builtin_strlen (exp, target, target_mode);
6943 if (target)
6944 return target;
6945 break;
6947 case BUILT_IN_STRCAT:
6948 target = expand_builtin_strcat (exp, target);
6949 if (target)
6950 return target;
6951 break;
6953 case BUILT_IN_STRCPY:
6954 target = expand_builtin_strcpy (exp, target);
6955 if (target)
6956 return target;
6957 break;
6959 case BUILT_IN_STRNCAT:
6960 target = expand_builtin_strncat (exp, target);
6961 if (target)
6962 return target;
6963 break;
6965 case BUILT_IN_STRNCPY:
6966 target = expand_builtin_strncpy (exp, target);
6967 if (target)
6968 return target;
6969 break;
6971 case BUILT_IN_STPCPY:
6972 target = expand_builtin_stpcpy (exp, target, mode);
6973 if (target)
6974 return target;
6975 break;
6977 case BUILT_IN_STPNCPY:
6978 target = expand_builtin_stpncpy (exp, target);
6979 if (target)
6980 return target;
6981 break;
6983 case BUILT_IN_MEMCHR:
6984 target = expand_builtin_memchr (exp, target);
6985 if (target)
6986 return target;
6987 break;
6989 case BUILT_IN_MEMCPY:
6990 target = expand_builtin_memcpy (exp, target);
6991 if (target)
6992 return target;
6993 break;
6995 case BUILT_IN_MEMMOVE:
6996 target = expand_builtin_memmove (exp, target);
6997 if (target)
6998 return target;
6999 break;
7001 case BUILT_IN_MEMPCPY:
7002 target = expand_builtin_mempcpy (exp, target);
7003 if (target)
7004 return target;
7005 break;
7007 case BUILT_IN_MEMSET:
7008 target = expand_builtin_memset (exp, target, mode);
7009 if (target)
7010 return target;
7011 break;
7013 case BUILT_IN_BZERO:
7014 target = expand_builtin_bzero (exp);
7015 if (target)
7016 return target;
7017 break;
7019 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7020 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7021 when changing it to a strcmp call. */
7022 case BUILT_IN_STRCMP_EQ:
7023 target = expand_builtin_memcmp (exp, target, true);
7024 if (target)
7025 return target;
7027 /* Change this call back to a BUILT_IN_STRCMP. */
7028 TREE_OPERAND (exp, 1)
7029 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7031 /* Delete the last parameter. */
7032 unsigned int i;
7033 vec<tree, va_gc> *arg_vec;
7034 vec_alloc (arg_vec, 2);
7035 for (i = 0; i < 2; i++)
7036 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7037 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7038 /* FALLTHROUGH */
7040 case BUILT_IN_STRCMP:
7041 target = expand_builtin_strcmp (exp, target);
7042 if (target)
7043 return target;
7044 break;
7046 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7047 back to a BUILT_IN_STRNCMP. */
7048 case BUILT_IN_STRNCMP_EQ:
7049 target = expand_builtin_memcmp (exp, target, true);
7050 if (target)
7051 return target;
7053 /* Change it back to a BUILT_IN_STRNCMP. */
7054 TREE_OPERAND (exp, 1)
7055 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7056 /* FALLTHROUGH */
7058 case BUILT_IN_STRNCMP:
7059 target = expand_builtin_strncmp (exp, target, mode);
7060 if (target)
7061 return target;
7062 break;
7064 case BUILT_IN_BCMP:
7065 case BUILT_IN_MEMCMP:
7066 case BUILT_IN_MEMCMP_EQ:
7067 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7068 if (target)
7069 return target;
7070 if (fcode == BUILT_IN_MEMCMP_EQ)
7072 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7073 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7075 break;
7077 case BUILT_IN_SETJMP:
7078 /* This should have been lowered to the builtins below. */
7079 gcc_unreachable ();
7081 case BUILT_IN_SETJMP_SETUP:
7082 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7083 and the receiver label. */
7084 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7086 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7087 VOIDmode, EXPAND_NORMAL);
7088 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7089 rtx_insn *label_r = label_rtx (label);
7091 /* This is copied from the handling of non-local gotos. */
7092 expand_builtin_setjmp_setup (buf_addr, label_r);
7093 nonlocal_goto_handler_labels
7094 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7095 nonlocal_goto_handler_labels);
7096 /* ??? Do not let expand_label treat us as such since we would
7097 not want to be both on the list of non-local labels and on
7098 the list of forced labels. */
7099 FORCED_LABEL (label) = 0;
7100 return const0_rtx;
7102 break;
7104 case BUILT_IN_SETJMP_RECEIVER:
7105 /* __builtin_setjmp_receiver is passed the receiver label. */
7106 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7108 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7109 rtx_insn *label_r = label_rtx (label);
7111 expand_builtin_setjmp_receiver (label_r);
7112 return const0_rtx;
7114 break;
7116 /* __builtin_longjmp is passed a pointer to an array of five words.
7117 It's similar to the C library longjmp function but works with
7118 __builtin_setjmp above. */
7119 case BUILT_IN_LONGJMP:
7120 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7122 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7123 VOIDmode, EXPAND_NORMAL);
7124 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7126 if (value != const1_rtx)
7128 error ("%<__builtin_longjmp%> second argument must be 1");
7129 return const0_rtx;
7132 expand_builtin_longjmp (buf_addr, value);
7133 return const0_rtx;
7135 break;
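/* Usage sketch (editorial, not part of the GCC sources), showing the
   five-word buffer and the requirement that the second argument of
   __builtin_longjmp be the constant 1:

       static void *jmp_buffer[5];   // five words, as described above

       static void __attribute__ ((noinline))
       jump_back (void)
       {
         __builtin_longjmp (jmp_buffer, 1);   // second argument must be 1
       }

       int main (void)
       {
         if (__builtin_setjmp (jmp_buffer) == 0)
           jump_back ();   // control re-enters at the __builtin_setjmp
         return 0;
       }
*/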
7137 case BUILT_IN_NONLOCAL_GOTO:
7138 target = expand_builtin_nonlocal_goto (exp);
7139 if (target)
7140 return target;
7141 break;
7143 /* This updates the setjmp buffer that is its argument with the value
7144 of the current stack pointer. */
7145 case BUILT_IN_UPDATE_SETJMP_BUF:
7146 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7148 rtx buf_addr
7149 = expand_normal (CALL_EXPR_ARG (exp, 0));
7151 expand_builtin_update_setjmp_buf (buf_addr);
7152 return const0_rtx;
7154 break;
7156 case BUILT_IN_TRAP:
7157 expand_builtin_trap ();
7158 return const0_rtx;
7160 case BUILT_IN_UNREACHABLE:
7161 expand_builtin_unreachable ();
7162 return const0_rtx;
7164 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7165 case BUILT_IN_SIGNBITD32:
7166 case BUILT_IN_SIGNBITD64:
7167 case BUILT_IN_SIGNBITD128:
7168 target = expand_builtin_signbit (exp, target);
7169 if (target)
7170 return target;
7171 break;
7173 /* Various hooks for the DWARF 2 __throw routine. */
7174 case BUILT_IN_UNWIND_INIT:
7175 expand_builtin_unwind_init ();
7176 return const0_rtx;
7177 case BUILT_IN_DWARF_CFA:
7178 return virtual_cfa_rtx;
7179 #ifdef DWARF2_UNWIND_INFO
7180 case BUILT_IN_DWARF_SP_COLUMN:
7181 return expand_builtin_dwarf_sp_column ();
7182 case BUILT_IN_INIT_DWARF_REG_SIZES:
7183 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7184 return const0_rtx;
7185 #endif
7186 case BUILT_IN_FROB_RETURN_ADDR:
7187 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7188 case BUILT_IN_EXTRACT_RETURN_ADDR:
7189 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7190 case BUILT_IN_EH_RETURN:
7191 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7192 CALL_EXPR_ARG (exp, 1));
7193 return const0_rtx;
7194 case BUILT_IN_EH_RETURN_DATA_REGNO:
7195 return expand_builtin_eh_return_data_regno (exp);
7196 case BUILT_IN_EXTEND_POINTER:
7197 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7198 case BUILT_IN_EH_POINTER:
7199 return expand_builtin_eh_pointer (exp);
7200 case BUILT_IN_EH_FILTER:
7201 return expand_builtin_eh_filter (exp);
7202 case BUILT_IN_EH_COPY_VALUES:
7203 return expand_builtin_eh_copy_values (exp);
7205 case BUILT_IN_VA_START:
7206 return expand_builtin_va_start (exp);
7207 case BUILT_IN_VA_END:
7208 return expand_builtin_va_end (exp);
7209 case BUILT_IN_VA_COPY:
7210 return expand_builtin_va_copy (exp);
7211 case BUILT_IN_EXPECT:
7212 return expand_builtin_expect (exp, target);
7213 case BUILT_IN_ASSUME_ALIGNED:
7214 return expand_builtin_assume_aligned (exp, target);
7215 case BUILT_IN_PREFETCH:
7216 expand_builtin_prefetch (exp);
7217 return const0_rtx;
7219 case BUILT_IN_INIT_TRAMPOLINE:
7220 return expand_builtin_init_trampoline (exp, true);
7221 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7222 return expand_builtin_init_trampoline (exp, false);
7223 case BUILT_IN_ADJUST_TRAMPOLINE:
7224 return expand_builtin_adjust_trampoline (exp);
7226 case BUILT_IN_INIT_DESCRIPTOR:
7227 return expand_builtin_init_descriptor (exp);
7228 case BUILT_IN_ADJUST_DESCRIPTOR:
7229 return expand_builtin_adjust_descriptor (exp);
7231 case BUILT_IN_FORK:
7232 case BUILT_IN_EXECL:
7233 case BUILT_IN_EXECV:
7234 case BUILT_IN_EXECLP:
7235 case BUILT_IN_EXECLE:
7236 case BUILT_IN_EXECVP:
7237 case BUILT_IN_EXECVE:
7238 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7239 if (target)
7240 return target;
7241 break;
7243 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7244 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7245 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7246 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7247 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7248 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7249 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7250 if (target)
7251 return target;
7252 break;
7254 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7255 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7256 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7257 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7258 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7259 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7260 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7261 if (target)
7262 return target;
7263 break;
7265 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7266 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7267 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7268 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7269 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7270 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7271 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7272 if (target)
7273 return target;
7274 break;
7276 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7277 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7278 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7279 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7280 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7281 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7282 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7283 if (target)
7284 return target;
7285 break;
7287 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7288 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7289 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7290 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7291 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7292 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7293 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7294 if (target)
7295 return target;
7296 break;
7298 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7299 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7300 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7301 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7302 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7303 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7304 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7305 if (target)
7306 return target;
7307 break;
7309 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7310 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7311 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7312 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7313 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7314 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7315 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7316 if (target)
7317 return target;
7318 break;
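/* Semantics sketch (editorial, not part of the GCC sources): the
   fetch-and-op forms (expanded with false above) return the old value,
   the op-and-fetch forms (expanded with true) return the new one:

       int v = 0;
       int old_val = __sync_fetch_and_add (&v, 5);   // old_val == 0, v == 5
       int new_val = __sync_add_and_fetch (&v, 5);   // new_val == 10, v == 10
*/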
7320 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7321 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7322 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7323 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7324 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7325 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7326 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7327 if (target)
7328 return target;
7329 break;
7331 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7332 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7333 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7334 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7335 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7336 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7337 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7338 if (target)
7339 return target;
7340 break;
7342 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7343 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7344 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7345 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7346 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7347 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7348 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7349 if (target)
7350 return target;
7351 break;
7353 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7354 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7355 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7356 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7357 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7358 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7359 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7360 if (target)
7361 return target;
7362 break;
7364 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7365 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7366 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7367 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7368 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7369 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7370 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7371 if (target)
7372 return target;
7373 break;
7375 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7376 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7377 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7378 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7379 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7380 if (mode == VOIDmode)
7381 mode = TYPE_MODE (boolean_type_node);
7382 if (!target || !register_operand (target, mode))
7383 target = gen_reg_rtx (mode);
7385 mode = get_builtin_sync_mode
7386 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7387 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7388 if (target)
7389 return target;
7390 break;
7392 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7393 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7394 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7395 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7396 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7397 mode = get_builtin_sync_mode
7398 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7399 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7400 if (target)
7401 return target;
7402 break;
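/* Semantics sketch (editorial, not part of the GCC sources): the bool
   variant (expanded with true above) returns whether the swap happened;
   the val variant returns the prior contents:

       int v = 10;
       int done = __sync_bool_compare_and_swap (&v, 10, 20);  // done != 0, v == 20
       int old  = __sync_val_compare_and_swap  (&v, 20, 30);  // old == 20, v == 30
*/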
7404 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7405 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7406 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7407 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7408 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7409 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7410 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7411 if (target)
7412 return target;
7413 break;
7415 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7416 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7417 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7418 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7419 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7420 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7421 expand_builtin_sync_lock_release (mode, exp);
7422 return const0_rtx;
7424 case BUILT_IN_SYNC_SYNCHRONIZE:
7425 expand_builtin_sync_synchronize ();
7426 return const0_rtx;
7428 case BUILT_IN_ATOMIC_EXCHANGE_1:
7429 case BUILT_IN_ATOMIC_EXCHANGE_2:
7430 case BUILT_IN_ATOMIC_EXCHANGE_4:
7431 case BUILT_IN_ATOMIC_EXCHANGE_8:
7432 case BUILT_IN_ATOMIC_EXCHANGE_16:
7433 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7434 target = expand_builtin_atomic_exchange (mode, exp, target);
7435 if (target)
7436 return target;
7437 break;
7439 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7440 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7441 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7442 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7443 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7445 unsigned int nargs, z;
7446 vec<tree, va_gc> *vec;
7448 mode =
7449 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7450 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7451 if (target)
7452 return target;
7454 /* If this is turned into an external library call, the weak parameter
7455 must be dropped to match the expected parameter list. */
7456 nargs = call_expr_nargs (exp);
7457 vec_alloc (vec, nargs - 1);
7458 for (z = 0; z < 3; z++)
7459 vec->quick_push (CALL_EXPR_ARG (exp, z));
7460 /* Skip the boolean weak parameter. */
7461 for (z = 4; z < 6; z++)
7462 vec->quick_push (CALL_EXPR_ARG (exp, z));
7463 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7464 break;
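/* Argument-layout sketch (editorial, not part of the GCC sources): the
   built-in takes six arguments,

       int v = 0, expected = 0;
       // ptr, expected, desired, weak, success order, failure order
       __atomic_compare_exchange_n (&v, &expected, 1, 0,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);

   and the loop above keeps arguments 0-2 and 4-5, skipping the boolean
   weak argument (index 3) that the library function does not accept.  */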
7467 case BUILT_IN_ATOMIC_LOAD_1:
7468 case BUILT_IN_ATOMIC_LOAD_2:
7469 case BUILT_IN_ATOMIC_LOAD_4:
7470 case BUILT_IN_ATOMIC_LOAD_8:
7471 case BUILT_IN_ATOMIC_LOAD_16:
7472 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7473 target = expand_builtin_atomic_load (mode, exp, target);
7474 if (target)
7475 return target;
7476 break;
7478 case BUILT_IN_ATOMIC_STORE_1:
7479 case BUILT_IN_ATOMIC_STORE_2:
7480 case BUILT_IN_ATOMIC_STORE_4:
7481 case BUILT_IN_ATOMIC_STORE_8:
7482 case BUILT_IN_ATOMIC_STORE_16:
7483 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7484 target = expand_builtin_atomic_store (mode, exp);
7485 if (target)
7486 return const0_rtx;
7487 break;
7489 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7490 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7491 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7492 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7493 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7495 enum built_in_function lib;
7496 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7497 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7498 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7499 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7500 ignore, lib);
7501 if (target)
7502 return target;
7503 break;
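/* Fallback sketch (editorial, not part of the GCC sources): when the
   op-and-fetch form cannot be expanded inline, the LIB fetch-and-op
   routine is called and the operation is reapplied to its result, so
   observably

       __atomic_add_fetch (&v, n, order)
       == __atomic_fetch_add (&v, n, order) + n
*/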
7505 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7506 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7507 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7508 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7509 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7511 enum built_in_function lib;
7512 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7513 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7514 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7515 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7516 ignore, lib);
7517 if (target)
7518 return target;
7519 break;
7521 case BUILT_IN_ATOMIC_AND_FETCH_1:
7522 case BUILT_IN_ATOMIC_AND_FETCH_2:
7523 case BUILT_IN_ATOMIC_AND_FETCH_4:
7524 case BUILT_IN_ATOMIC_AND_FETCH_8:
7525 case BUILT_IN_ATOMIC_AND_FETCH_16:
7527 enum built_in_function lib;
7528 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7529 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7530 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7531 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7532 ignore, lib);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7538 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7539 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7540 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7541 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7543 enum built_in_function lib;
7544 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7545 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7546 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7547 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7548 ignore, lib);
7549 if (target)
7550 return target;
7551 break;
7553 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7554 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7555 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7556 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7557 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7559 enum built_in_function lib;
7560 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7561 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7562 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7563 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7564 ignore, lib);
7565 if (target)
7566 return target;
7567 break;
7569 case BUILT_IN_ATOMIC_OR_FETCH_1:
7570 case BUILT_IN_ATOMIC_OR_FETCH_2:
7571 case BUILT_IN_ATOMIC_OR_FETCH_4:
7572 case BUILT_IN_ATOMIC_OR_FETCH_8:
7573 case BUILT_IN_ATOMIC_OR_FETCH_16:
7575 enum built_in_function lib;
7576 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7577 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7578 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7579 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7580 ignore, lib);
7581 if (target)
7582 return target;
7583 break;
7585 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7586 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7587 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7588 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7589 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7590 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7591 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7592 ignore, BUILT_IN_NONE);
7593 if (target)
7594 return target;
7595 break;
7597 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7598 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7599 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7600 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7601 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7602 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7603 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7604 ignore, BUILT_IN_NONE);
7605 if (target)
7606 return target;
7607 break;
7609 case BUILT_IN_ATOMIC_FETCH_AND_1:
7610 case BUILT_IN_ATOMIC_FETCH_AND_2:
7611 case BUILT_IN_ATOMIC_FETCH_AND_4:
7612 case BUILT_IN_ATOMIC_FETCH_AND_8:
7613 case BUILT_IN_ATOMIC_FETCH_AND_16:
7614 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7615 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7616 ignore, BUILT_IN_NONE);
7617 if (target)
7618 return target;
7619 break;
7621 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7622 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7623 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7624 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7625 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7626 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7627 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7628 ignore, BUILT_IN_NONE);
7629 if (target)
7630 return target;
7631 break;
7633 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7634 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7635 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7636 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7637 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7638 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7639 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7640 ignore, BUILT_IN_NONE);
7641 if (target)
7642 return target;
7643 break;
7645 case BUILT_IN_ATOMIC_FETCH_OR_1:
7646 case BUILT_IN_ATOMIC_FETCH_OR_2:
7647 case BUILT_IN_ATOMIC_FETCH_OR_4:
7648 case BUILT_IN_ATOMIC_FETCH_OR_8:
7649 case BUILT_IN_ATOMIC_FETCH_OR_16:
7650 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7651 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7652 ignore, BUILT_IN_NONE);
7653 if (target)
7654 return target;
7655 break;
7657 case BUILT_IN_ATOMIC_TEST_AND_SET:
7658 return expand_builtin_atomic_test_and_set (exp, target);
7660 case BUILT_IN_ATOMIC_CLEAR:
7661 return expand_builtin_atomic_clear (exp);
7663 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7664 return expand_builtin_atomic_always_lock_free (exp);
7666 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7667 target = expand_builtin_atomic_is_lock_free (exp);
7668 if (target)
7669 return target;
7670 break;
7672 case BUILT_IN_ATOMIC_THREAD_FENCE:
7673 expand_builtin_atomic_thread_fence (exp);
7674 return const0_rtx;
7676 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7677 expand_builtin_atomic_signal_fence (exp);
7678 return const0_rtx;
7680 case BUILT_IN_OBJECT_SIZE:
7681 return expand_builtin_object_size (exp);
7683 case BUILT_IN_MEMCPY_CHK:
7684 case BUILT_IN_MEMPCPY_CHK:
7685 case BUILT_IN_MEMMOVE_CHK:
7686 case BUILT_IN_MEMSET_CHK:
7687 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7688 if (target)
7689 return target;
7690 break;
7692 case BUILT_IN_STRCPY_CHK:
7693 case BUILT_IN_STPCPY_CHK:
7694 case BUILT_IN_STRNCPY_CHK:
7695 case BUILT_IN_STPNCPY_CHK:
7696 case BUILT_IN_STRCAT_CHK:
7697 case BUILT_IN_STRNCAT_CHK:
7698 case BUILT_IN_SNPRINTF_CHK:
7699 case BUILT_IN_VSNPRINTF_CHK:
7700 maybe_emit_chk_warning (exp, fcode);
7701 break;
7703 case BUILT_IN_SPRINTF_CHK:
7704 case BUILT_IN_VSPRINTF_CHK:
7705 maybe_emit_sprintf_chk_warning (exp, fcode);
7706 break;
7708 case BUILT_IN_FREE:
7709 if (warn_free_nonheap_object)
7710 maybe_emit_free_warning (exp);
7711 break;
7713 case BUILT_IN_THREAD_POINTER:
7714 return expand_builtin_thread_pointer (exp, target);
7716 case BUILT_IN_SET_THREAD_POINTER:
7717 expand_builtin_set_thread_pointer (exp);
7718 return const0_rtx;
7720 case BUILT_IN_ACC_ON_DEVICE:
7721 /* Do a library call if we failed to expand the builtin when
7722 folding. */
7723 break;
7725 case BUILT_IN_GOACC_PARLEVEL_ID:
7726 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7727 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7729 default: /* Just do a library call for an unknown builtin. */
7730 break;
7733 /* The switch statement above can drop through to cause the function
7734 to be called normally. */
7735 return expand_call (exp, target, ignore);
7738 /* Determine whether a tree node represents a call to a built-in
7739 function. If the tree T is a call to a built-in function with
7740 the right number of arguments of the appropriate types, return
7741 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7742 Otherwise the return value is END_BUILTINS. */
7744 enum built_in_function
7745 builtin_mathfn_code (const_tree t)
7747 const_tree fndecl, arg, parmlist;
7748 const_tree argtype, parmtype;
7749 const_call_expr_arg_iterator iter;
7751 if (TREE_CODE (t) != CALL_EXPR)
7752 return END_BUILTINS;
7754 fndecl = get_callee_fndecl (t);
7755 if (fndecl == NULL_TREE
7756 || TREE_CODE (fndecl) != FUNCTION_DECL
7757 || ! DECL_BUILT_IN (fndecl)
7758 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7759 return END_BUILTINS;
7761 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7762 init_const_call_expr_arg_iterator (t, &iter);
7763 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7765 /* If a function doesn't take a variable number of arguments,
7766 the last element in the list will have type `void'. */
7767 parmtype = TREE_VALUE (parmlist);
7768 if (VOID_TYPE_P (parmtype))
7770 if (more_const_call_expr_args_p (&iter))
7771 return END_BUILTINS;
7772 return DECL_FUNCTION_CODE (fndecl);
7775 if (! more_const_call_expr_args_p (&iter))
7776 return END_BUILTINS;
7778 arg = next_const_call_expr_arg (&iter);
7779 argtype = TREE_TYPE (arg);
7781 if (SCALAR_FLOAT_TYPE_P (parmtype))
7783 if (! SCALAR_FLOAT_TYPE_P (argtype))
7784 return END_BUILTINS;
7786 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7788 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7789 return END_BUILTINS;
7791 else if (POINTER_TYPE_P (parmtype))
7793 if (! POINTER_TYPE_P (argtype))
7794 return END_BUILTINS;
7796 else if (INTEGRAL_TYPE_P (parmtype))
7798 if (! INTEGRAL_TYPE_P (argtype))
7799 return END_BUILTINS;
7801 else
7802 return END_BUILTINS;
7805 /* Variable-length argument list. */
7806 return DECL_FUNCTION_CODE (fndecl);
7809 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7810 evaluate to a constant. */
7812 static tree
7813 fold_builtin_constant_p (tree arg)
7815 /* We return 1 for a numeric type that's known to be a constant
7816 value at compile-time or for an aggregate type that's a
7817 literal constant. */
7818 STRIP_NOPS (arg);
7820 /* If we know this is a constant, return the constant one. */
7821 if (CONSTANT_CLASS_P (arg)
7822 || (TREE_CODE (arg) == CONSTRUCTOR
7823 && TREE_CONSTANT (arg)))
7824 return integer_one_node;
7825 if (TREE_CODE (arg) == ADDR_EXPR)
7827 tree op = TREE_OPERAND (arg, 0);
7828 if (TREE_CODE (op) == STRING_CST
7829 || (TREE_CODE (op) == ARRAY_REF
7830 && integer_zerop (TREE_OPERAND (op, 1))
7831 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7832 return integer_one_node;
7835 /* If this expression has side effects, show we don't know it to be a
7836 constant. Likewise if it's a pointer or aggregate type since in
7837 those cases we only want literals, since those are only optimized
7838 when generating RTL, not later.
7839 And finally, if we are compiling an initializer, not code, we
7840 need to return a definite result now; there's not going to be any
7841 more optimization done. */
7842 if (TREE_SIDE_EFFECTS (arg)
7843 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7844 || POINTER_TYPE_P (TREE_TYPE (arg))
7845 || cfun == 0
7846 || folding_initializer
7847 || force_folding_builtin_constant_p)
7848 return integer_zero_node;
7850 return NULL_TREE;
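/* Behaviour sketch (editorial, not part of the GCC sources):

       int f (int x)
       {
         int a = __builtin_constant_p (42);     // folds to 1 here
         int b = __builtin_constant_p (x + 1);  // not decided here
         return a + b;
       }

   For arguments such as x + 1 the NULL_TREE result keeps the call
   around, and it folds to 0 later unless optimization proves the value
   constant.  */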
7853 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7854 return it as a truthvalue. */
7856 static tree
7857 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7858 tree predictor)
7860 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7862 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7863 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7864 ret_type = TREE_TYPE (TREE_TYPE (fn));
7865 pred_type = TREE_VALUE (arg_types);
7866 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7868 pred = fold_convert_loc (loc, pred_type, pred);
7869 expected = fold_convert_loc (loc, expected_type, expected);
7870 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7871 predictor);
7873 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7874 build_int_cst (ret_type, 0));
7877 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7878 NULL_TREE if no simplification is possible. */
7880 tree
7881 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7883 tree inner, fndecl, inner_arg0;
7884 enum tree_code code;
7886 /* Distribute the expected value over short-circuiting operators.
7887 See through the cast from truthvalue_type_node to long. */
7888 inner_arg0 = arg0;
7889 while (CONVERT_EXPR_P (inner_arg0)
7890 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7891 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7892 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7894 /* If this is a builtin_expect within a builtin_expect, keep the
7895 inner one. See through a comparison against a constant. It
7896 might have been added to create a truthvalue. */
7897 inner = inner_arg0;
7899 if (COMPARISON_CLASS_P (inner)
7900 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7901 inner = TREE_OPERAND (inner, 0);
7903 if (TREE_CODE (inner) == CALL_EXPR
7904 && (fndecl = get_callee_fndecl (inner))
7905 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7906 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7907 return arg0;
7909 inner = inner_arg0;
7910 code = TREE_CODE (inner);
7911 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7913 tree op0 = TREE_OPERAND (inner, 0);
7914 tree op1 = TREE_OPERAND (inner, 1);
7915 arg1 = save_expr (arg1);
7917 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7918 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7919 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7921 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7924 /* If the argument isn't invariant then there's nothing else we can do. */
7925 if (!TREE_CONSTANT (inner_arg0))
7926 return NULL_TREE;
7928 /* If we expect that a comparison against the argument will fold to
7929 a constant, return the constant. In practice, this means a true
7930 constant or the address of a non-weak symbol. */
7931 inner = inner_arg0;
7932 STRIP_NOPS (inner);
7933 if (TREE_CODE (inner) == ADDR_EXPR)
7937 inner = TREE_OPERAND (inner, 0);
7939 while (TREE_CODE (inner) == COMPONENT_REF
7940 || TREE_CODE (inner) == ARRAY_REF);
7941 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7942 return NULL_TREE;
7945 /* Otherwise, ARG0 already has the proper type for the return value. */
7946 return arg0;
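/* Distribution sketch (editorial, not part of the GCC sources): for a
   short-circuit operand the expectation is pushed into both halves, so

       __builtin_expect (a && b, 1)

   is folded into roughly

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   converted back to the type of the original first argument.  */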
7949 /* Fold a call to __builtin_classify_type with argument ARG. */
7951 static tree
7952 fold_builtin_classify_type (tree arg)
7954 if (arg == 0)
7955 return build_int_cst (integer_type_node, no_type_class);
7957 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7960 /* Fold a call to __builtin_strlen with argument ARG. */
7962 static tree
7963 fold_builtin_strlen (location_t loc, tree type, tree arg)
7965 if (!validate_arg (arg, POINTER_TYPE))
7966 return NULL_TREE;
7967 else
7969 tree len = c_strlen (arg, 0);
7971 if (len)
7972 return fold_convert_loc (loc, type, len);
7974 return NULL_TREE;
7978 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7980 static tree
7981 fold_builtin_inf (location_t loc, tree type, int warn)
7983 REAL_VALUE_TYPE real;
7985 /* __builtin_inff is intended to be usable to define INFINITY on all
7986 targets. If an infinity is not available, INFINITY expands "to a
7987 positive constant of type float that overflows at translation
7988 time", footnote "In this case, using INFINITY will violate the
7989 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7990 Thus we pedwarn to ensure this constraint violation is
7991 diagnosed. */
7992 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7993 pedwarn (loc, 0, "target format does not support infinity");
7995 real_inf (&real);
7996 return build_real (type, real);
7999 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8000 NULL_TREE if no simplification can be made. */
8002 static tree
8003 fold_builtin_sincos (location_t loc,
8004 tree arg0, tree arg1, tree arg2)
8006 tree type;
8007 tree fndecl, call = NULL_TREE;
8009 if (!validate_arg (arg0, REAL_TYPE)
8010 || !validate_arg (arg1, POINTER_TYPE)
8011 || !validate_arg (arg2, POINTER_TYPE))
8012 return NULL_TREE;
8014 type = TREE_TYPE (arg0);
8016 /* Canonicalize sincos to cexpi. */
8017 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8018 if (fn == END_BUILTINS)
8019 return NULL_TREE;
8021 /* Calculate the result when the argument is a constant. */
8022 if (TREE_CODE (arg0) == REAL_CST)
8024 tree complex_type = build_complex_type (type);
8025 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8027 if (!call)
8029 if (!targetm.libc_has_function (function_c99_math_complex)
8030 || !builtin_decl_implicit_p (fn))
8031 return NULL_TREE;
8032 fndecl = builtin_decl_explicit (fn);
8033 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8034 call = builtin_save_expr (call);
8037 tree ptype = build_pointer_type (type);
8038 arg1 = fold_convert (ptype, arg1);
8039 arg2 = fold_convert (ptype, arg2);
8040 return build2 (COMPOUND_EXPR, void_type_node,
8041 build2 (MODIFY_EXPR, void_type_node,
8042 build_fold_indirect_ref_loc (loc, arg1),
8043 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8044 build2 (MODIFY_EXPR, void_type_node,
8045 build_fold_indirect_ref_loc (loc, arg2),
8046 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8049 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8050 Return NULL_TREE if no simplification can be made. */
8052 static tree
8053 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8055 if (!validate_arg (arg1, POINTER_TYPE)
8056 || !validate_arg (arg2, POINTER_TYPE)
8057 || !validate_arg (len, INTEGER_TYPE))
8058 return NULL_TREE;
8060 /* If the LEN parameter is zero, return zero. */
8061 if (integer_zerop (len))
8062 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8063 arg1, arg2);
8065 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8066 if (operand_equal_p (arg1, arg2, 0))
8067 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8069 /* If the LEN parameter is one, return an expression corresponding to
8070 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8071 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8073 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8074 tree cst_uchar_ptr_node
8075 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8077 tree ind1
8078 = fold_convert_loc (loc, integer_type_node,
8079 build1 (INDIRECT_REF, cst_uchar_node,
8080 fold_convert_loc (loc,
8081 cst_uchar_ptr_node,
8082 arg1)));
8083 tree ind2
8084 = fold_convert_loc (loc, integer_type_node,
8085 build1 (INDIRECT_REF, cst_uchar_node,
8086 fold_convert_loc (loc,
8087 cst_uchar_ptr_node,
8088 arg2)));
8089 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8092 return NULL_TREE;
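/* Folding examples (editorial, not part of the GCC sources):

       memcmp (p, q, 0)   // 0, while still evaluating p and q
       memcmp (p, p, n)   // 0, while still evaluating n
       memcmp (p, q, 1)   // *(const unsigned char *) p
                          //   - *(const unsigned char *) q
*/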
8095 /* Fold a call to builtin isascii with argument ARG. */
8097 static tree
8098 fold_builtin_isascii (location_t loc, tree arg)
8100 if (!validate_arg (arg, INTEGER_TYPE))
8101 return NULL_TREE;
8102 else
8104 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8105 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8106 build_int_cst (integer_type_node,
8107 ~ (unsigned HOST_WIDE_INT) 0x7f));
8108 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8109 arg, integer_zero_node);
8113 /* Fold a call to builtin toascii with argument ARG. */
8115 static tree
8116 fold_builtin_toascii (location_t loc, tree arg)
8118 if (!validate_arg (arg, INTEGER_TYPE))
8119 return NULL_TREE;
8121 /* Transform toascii(c) -> (c & 0x7f). */
8122 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8123 build_int_cst (integer_type_node, 0x7f));
8126 /* Fold a call to builtin isdigit with argument ARG. */
8128 static tree
8129 fold_builtin_isdigit (location_t loc, tree arg)
8131 if (!validate_arg (arg, INTEGER_TYPE))
8132 return NULL_TREE;
8133 else
8135 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8136 /* According to the C standard, isdigit is unaffected by locale.
8137 However, it definitely is affected by the target character set. */
8138 unsigned HOST_WIDE_INT target_digit0
8139 = lang_hooks.to_target_charset ('0');
8141 if (target_digit0 == 0)
8142 return NULL_TREE;
8144 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8145 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8146 build_int_cst (unsigned_type_node, target_digit0));
8147 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8148 build_int_cst (unsigned_type_node, 9));
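/* Worked example (editorial, not part of the GCC sources): on an ASCII
   target, where '0' is 48, the transform turns

       isdigit (c)

   into the single unsigned comparison

       (unsigned) c - 48 <= 9

   which holds exactly for c in '0' .. '9'.  */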
8152 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8154 static tree
8155 fold_builtin_fabs (location_t loc, tree arg, tree type)
8157 if (!validate_arg (arg, REAL_TYPE))
8158 return NULL_TREE;
8160 arg = fold_convert_loc (loc, type, arg);
8161 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8164 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8166 static tree
8167 fold_builtin_abs (location_t loc, tree arg, tree type)
8169 if (!validate_arg (arg, INTEGER_TYPE))
8170 return NULL_TREE;
8172 arg = fold_convert_loc (loc, type, arg);
8173 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8176 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8178 static tree
8179 fold_builtin_carg (location_t loc, tree arg, tree type)
8181 if (validate_arg (arg, COMPLEX_TYPE)
8182 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8184 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8186 if (atan2_fn)
8188 tree new_arg = builtin_save_expr (arg);
8189 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8190 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8191 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8195 return NULL_TREE;
8198 /* Fold a call to builtin frexp, we can assume the base is 2. */
8200 static tree
8201 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8203 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8204 return NULL_TREE;
8206 STRIP_NOPS (arg0);
8208 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8209 return NULL_TREE;
8211 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8213 /* Proceed if a valid pointer type was passed in. */
8214 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8216 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8217 tree frac, exp;
8219 switch (value->cl)
8221 case rvc_zero:
8222 /* For +-0, return (*exp = 0, +-0). */
8223 exp = integer_zero_node;
8224 frac = arg0;
8225 break;
8226 case rvc_nan:
8227 case rvc_inf:
8228 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8229 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8230 case rvc_normal:
8232 /* Since the frexp function always expects base 2, and in
8233 GCC normalized significands are already in the range
8234 [0.5, 1.0), we have exactly what frexp wants. */
8235 REAL_VALUE_TYPE frac_rvt = *value;
8236 SET_REAL_EXP (&frac_rvt, 0);
8237 frac = build_real (rettype, frac_rvt);
8238 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8240 break;
8241 default:
8242 gcc_unreachable ();
8245 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8246 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8247 TREE_SIDE_EFFECTS (arg1) = 1;
8248 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8251 return NULL_TREE;
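/* Constant-folding example (editorial, not part of the GCC sources):
   4.0 == 0.5 * 2^3, and GCC's normalized significands already lie in
   [0.5, 1.0), so

       int e;
       double f = __builtin_frexp (4.0, &e);   // f == 0.5, e == 3

   folds to roughly the compound expression (e = 3, 0.5).  */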
8254 /* Fold a call to builtin modf. */
8256 static tree
8257 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8259 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8260 return NULL_TREE;
8262 STRIP_NOPS (arg0);
8264 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8265 return NULL_TREE;
8267 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8269 /* Proceed if a valid pointer type was passed in. */
8270 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8272 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8273 REAL_VALUE_TYPE trunc, frac;
8275 switch (value->cl)
8277 case rvc_nan:
8278 case rvc_zero:
8279 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8280 trunc = frac = *value;
8281 break;
8282 case rvc_inf:
8283 /* For +-Inf, return (*arg1 = arg0, +-0). */
8284 frac = dconst0;
8285 frac.sign = value->sign;
8286 trunc = *value;
8287 break;
8288 case rvc_normal:
8289 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8290 real_trunc (&trunc, VOIDmode, value);
8291 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8292 /* If the original number was negative and already
8293 integral, then the fractional part is -0.0. */
8294 if (value->sign && frac.cl == rvc_zero)
8295 frac.sign = value->sign;
8296 break;
8299 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8300 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8301 build_real (rettype, trunc));
8302 TREE_SIDE_EFFECTS (arg1) = 1;
8303 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8304 build_real (rettype, frac));
8307 return NULL_TREE;
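/* Constant-folding examples (editorial, not part of the GCC sources):

       double ip;
       double f1 = __builtin_modf (-3.5, &ip);  // ip == -3.0, f1 == -0.5
       double f2 = __builtin_modf (-2.0, &ip);  // ip == -2.0, f2 == -0.0

   The second call shows the negative-and-already-integral case noted
   above: the fractional part keeps the sign, giving -0.0.  */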
8310 /* Given a location LOC, an interclass builtin function decl FNDECL
8311 and its single argument ARG, return a folded expression computing
8312 the same, or NULL_TREE if we either couldn't or didn't want to fold
8313 (the latter happens if there's an RTL instruction available). */
8315 static tree
8316 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8318 machine_mode mode;
8320 if (!validate_arg (arg, REAL_TYPE))
8321 return NULL_TREE;
8323 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8324 return NULL_TREE;
8326 mode = TYPE_MODE (TREE_TYPE (arg));
8328 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8330 /* If there is no optab, try generic code. */
8331 switch (DECL_FUNCTION_CODE (fndecl))
8333 tree result;
8335 CASE_FLT_FN (BUILT_IN_ISINF):
8337 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8338 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8339 tree type = TREE_TYPE (arg);
8340 REAL_VALUE_TYPE r;
8341 char buf[128];
8343 if (is_ibm_extended)
8345 /* NaN and Inf are encoded in the high-order double value
8346 only. The low-order value is not significant. */
8347 type = double_type_node;
8348 mode = DFmode;
8349 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8351 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8352 real_from_string (&r, buf);
8353 result = build_call_expr (isgr_fn, 2,
8354 fold_build1_loc (loc, ABS_EXPR, type, arg),
8355 build_real (type, r));
8356 return result;
8358 CASE_FLT_FN (BUILT_IN_FINITE):
8359 case BUILT_IN_ISFINITE:
8361 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8362 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8363 tree type = TREE_TYPE (arg);
8364 REAL_VALUE_TYPE r;
8365 char buf[128];
8367 if (is_ibm_extended)
8369 /* NaN and Inf are encoded in the high-order double value
8370 only. The low-order value is not significant. */
8371 type = double_type_node;
8372 mode = DFmode;
8373 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8375 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8376 real_from_string (&r, buf);
8377 result = build_call_expr (isle_fn, 2,
8378 fold_build1_loc (loc, ABS_EXPR, type, arg),
8379 build_real (type, r));
8380 /*result = fold_build2_loc (loc, UNGT_EXPR,
8381 TREE_TYPE (TREE_TYPE (fndecl)),
8382 fold_build1_loc (loc, ABS_EXPR, type, arg),
8383 build_real (type, r));
8384 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8385 TREE_TYPE (TREE_TYPE (fndecl)),
8386 result);*/
8387 return result;
8389 case BUILT_IN_ISNORMAL:
8391 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8392 islessequal(fabs(x),DBL_MAX). */
8393 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8394 tree type = TREE_TYPE (arg);
8395 tree orig_arg, max_exp, min_exp;
8396 machine_mode orig_mode = mode;
8397 REAL_VALUE_TYPE rmax, rmin;
8398 char buf[128];
8400 orig_arg = arg = builtin_save_expr (arg);
8401 if (is_ibm_extended)
8403 /* Use double to test the normal range of IBM extended
8404 precision. Emin for IBM extended precision is
8405 different to emin for IEEE double, being 53 higher
8406 since the low double exponent is at least 53 lower
8407 than the high double exponent. */
8408 type = double_type_node;
8409 mode = DFmode;
8410 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8412 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8414 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8415 real_from_string (&rmax, buf);
8416 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8417 real_from_string (&rmin, buf);
8418 max_exp = build_real (type, rmax);
8419 min_exp = build_real (type, rmin);
8421 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8422 if (is_ibm_extended)
8424 /* Testing the high end of the range is done just using
8425 the high double, using the same test as isfinite().
8426 For the subnormal end of the range we first test the
8427 high double, then if its magnitude is equal to the
8428 limit of 0x1p-969, we test whether the low double is
8429 non-zero and opposite sign to the high double. */
8430 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8431 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8432 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8433 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8434 arg, min_exp);
8435 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8436 complex_double_type_node, orig_arg);
8437 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8438 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8439 tree zero = build_real (type, dconst0);
8440 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8441 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8442 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8443 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8444 fold_build3 (COND_EXPR,
8445 integer_type_node,
8446 hilt, logt, lolt));
8447 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8448 eq_min, ok_lo);
8449 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8450 gt_min, eq_min);
8452 else
8454 tree const isge_fn
8455 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8456 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8458 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8459 max_exp, min_exp);
8460 return result;
8462 default:
8463 break;
8466 return NULL_TREE;
8469 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8470 ARG is the argument for the call. */
8472 static tree
8473 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8475 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8477 if (!validate_arg (arg, REAL_TYPE))
8478 return NULL_TREE;
8480 switch (builtin_index)
8482 case BUILT_IN_ISINF:
8483 if (!HONOR_INFINITIES (arg))
8484 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8486 return NULL_TREE;
8488 case BUILT_IN_ISINF_SIGN:
8490 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8491 /* In a boolean context, GCC will fold the inner COND_EXPR to
8492 1. So e.g. "if (isinf_sign(x))" would be folded to just
8493 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8494 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8495 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8496 tree tmp = NULL_TREE;
8498 arg = builtin_save_expr (arg);
8500 if (signbit_fn && isinf_fn)
8502 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8503 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8505 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8506 signbit_call, integer_zero_node);
8507 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8508 isinf_call, integer_zero_node);
8510 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8511 integer_minus_one_node, integer_one_node);
8512 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8513 isinf_call, tmp,
8514 integer_zero_node);
8517 return tmp;
8520 case BUILT_IN_ISFINITE:
8521 if (!HONOR_NANS (arg)
8522 && !HONOR_INFINITIES (arg))
8523 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8525 return NULL_TREE;
8527 case BUILT_IN_ISNAN:
8528 if (!HONOR_NANS (arg))
8529 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8532 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8533 if (is_ibm_extended)
8535 /* NaN and Inf are encoded in the high-order double value
8536 only. The low-order value is not significant. */
8537 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8540 arg = builtin_save_expr (arg);
8541 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8543 default:
8544 gcc_unreachable ();
8548 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8549 This builtin will generate code to return the appropriate floating
8550 point classification depending on the value of the floating point
8551 number passed in. The possible return values must be supplied as
8552 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8553 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8554 one floating-point argument, which is "type generic". */
8556 static tree
8557 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8559 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8560 arg, type, res, tmp;
8561 machine_mode mode;
8562 REAL_VALUE_TYPE r;
8563 char buf[128];
8565 /* Verify the required arguments in the original call. */
8566 if (nargs != 6
8567 || !validate_arg (args[0], INTEGER_TYPE)
8568 || !validate_arg (args[1], INTEGER_TYPE)
8569 || !validate_arg (args[2], INTEGER_TYPE)
8570 || !validate_arg (args[3], INTEGER_TYPE)
8571 || !validate_arg (args[4], INTEGER_TYPE)
8572 || !validate_arg (args[5], REAL_TYPE))
8573 return NULL_TREE;
8575 fp_nan = args[0];
8576 fp_infinite = args[1];
8577 fp_normal = args[2];
8578 fp_subnormal = args[3];
8579 fp_zero = args[4];
8580 arg = args[5];
8581 type = TREE_TYPE (arg);
8582 mode = TYPE_MODE (type);
8583 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8585 /* fpclassify(x) ->
8586 isnan(x) ? FP_NAN :
8587 (fabs(x) == Inf ? FP_INFINITE :
8588 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8589 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8591 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8592 build_real (type, dconst0));
8593 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8594 tmp, fp_zero, fp_subnormal);
8596 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8597 real_from_string (&r, buf);
8598 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8599 arg, build_real (type, r));
8600 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8602 if (HONOR_INFINITIES (mode))
8604 real_inf (&r);
8605 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8606 build_real (type, r));
8607 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8608 fp_infinite, res);
8611 if (HONOR_NANS (mode))
8613 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8614 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8617 return res;
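/* Usage sketch (editorial, not part of the GCC sources), showing the
   required argument order:

       #include <math.h>

       int classify (double x)
       {
         return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                                      FP_SUBNORMAL, FP_ZERO, x);
       }
*/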
8620 /* Fold a call to an unordered comparison function such as
8621 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8622 being called and ARG0 and ARG1 are the arguments for the call.
8623 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8624 the opposite of the desired result. UNORDERED_CODE is used
8625 for modes that can hold NaNs and ORDERED_CODE is used for
8626 the rest. */
8628 static tree
8629 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8630 enum tree_code unordered_code,
8631 enum tree_code ordered_code)
8633 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8634 enum tree_code code;
8635 tree type0, type1;
8636 enum tree_code code0, code1;
8637 tree cmp_type = NULL_TREE;
8639 type0 = TREE_TYPE (arg0);
8640 type1 = TREE_TYPE (arg1);
8642 code0 = TREE_CODE (type0);
8643 code1 = TREE_CODE (type1);
8645 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8646 /* Choose the wider of two real types. */
8647 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8648 ? type0 : type1;
8649 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8650 cmp_type = type0;
8651 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8652 cmp_type = type1;
8654 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8655 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8657 if (unordered_code == UNORDERED_EXPR)
8659 if (!HONOR_NANS (arg0))
8660 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8661 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8664 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8665 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8666 fold_build2_loc (loc, code, type, arg0, arg1));
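/* Folding sketch (editorial, not part of the GCC sources): each such
   macro folds to the negation of the opposite comparison, e.g.

       __builtin_isgreater (x, y)

   becomes roughly

       !(x <= y)          // ordered form, when NaNs cannot occur
       !(UNLE (x, y))     // unordered-or-less-equal form otherwise

   so a NaN operand yields 0 without raising an invalid-operand
   exception.  */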
8669 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8670 arithmetic if it can never overflow, or into internal functions that
8671 return both the result of the arithmetic and an overflow flag in
8672 a complex integer result, or into some other check for overflow.
8673 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8674 checking part of that. */
8676 static tree
8677 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8678 tree arg0, tree arg1, tree arg2)
8680 enum internal_fn ifn = IFN_LAST;
8681 /* The code of the expression corresponding to the type-generic
8682 built-in, or ERROR_MARK for the type-specific ones. */
8683 enum tree_code opcode = ERROR_MARK;
8684 bool ovf_only = false;
8686 switch (fcode)
8688 case BUILT_IN_ADD_OVERFLOW_P:
8689 ovf_only = true;
8690 /* FALLTHRU */
8691 case BUILT_IN_ADD_OVERFLOW:
8692 opcode = PLUS_EXPR;
8693 /* FALLTHRU */
8694 case BUILT_IN_SADD_OVERFLOW:
8695 case BUILT_IN_SADDL_OVERFLOW:
8696 case BUILT_IN_SADDLL_OVERFLOW:
8697 case BUILT_IN_UADD_OVERFLOW:
8698 case BUILT_IN_UADDL_OVERFLOW:
8699 case BUILT_IN_UADDLL_OVERFLOW:
8700 ifn = IFN_ADD_OVERFLOW;
8701 break;
8702 case BUILT_IN_SUB_OVERFLOW_P:
8703 ovf_only = true;
8704 /* FALLTHRU */
8705 case BUILT_IN_SUB_OVERFLOW:
8706 opcode = MINUS_EXPR;
8707 /* FALLTHRU */
8708 case BUILT_IN_SSUB_OVERFLOW:
8709 case BUILT_IN_SSUBL_OVERFLOW:
8710 case BUILT_IN_SSUBLL_OVERFLOW:
8711 case BUILT_IN_USUB_OVERFLOW:
8712 case BUILT_IN_USUBL_OVERFLOW:
8713 case BUILT_IN_USUBLL_OVERFLOW:
8714 ifn = IFN_SUB_OVERFLOW;
8715 break;
8716 case BUILT_IN_MUL_OVERFLOW_P:
8717 ovf_only = true;
8718 /* FALLTHRU */
8719 case BUILT_IN_MUL_OVERFLOW:
8720 opcode = MULT_EXPR;
8721 /* FALLTHRU */
8722 case BUILT_IN_SMUL_OVERFLOW:
8723 case BUILT_IN_SMULL_OVERFLOW:
8724 case BUILT_IN_SMULLL_OVERFLOW:
8725 case BUILT_IN_UMUL_OVERFLOW:
8726 case BUILT_IN_UMULL_OVERFLOW:
8727 case BUILT_IN_UMULLL_OVERFLOW:
8728 ifn = IFN_MUL_OVERFLOW;
8729 break;
8730 default:
8731 gcc_unreachable ();
8734 /* For the "generic" overloads, the first two arguments can have different
8735 types and the last argument determines the target type to use to check
8736 for overflow. The arguments of the other overloads all have the same
8737 type. */
8738 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8740 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8741 arguments are constant, attempt to fold the built-in call into a constant
8742 expression indicating whether or not it detected an overflow. */
8743 if (ovf_only
8744 && TREE_CODE (arg0) == INTEGER_CST
8745 && TREE_CODE (arg1) == INTEGER_CST)
8746 /* Perform the computation in the target type and check for overflow. */
8747 return omit_one_operand_loc (loc, boolean_type_node,
8748 arith_overflowed_p (opcode, type, arg0, arg1)
8749 ? boolean_true_node : boolean_false_node,
8750 arg2);
8752 tree ctype = build_complex_type (type);
8753 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8754 2, arg0, arg1);
8755 tree tgt = save_expr (call);
8756 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8757 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8758 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8760 if (ovf_only)
8761 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8763 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8764 tree store
8765 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8766 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
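/* Usage sketch (editorial, not part of the GCC sources;
   handle_overflow is a hypothetical placeholder):

       int r;
       if (__builtin_add_overflow (a, b, &r))      // result stored in r
         handle_overflow ();

       if (__builtin_mul_overflow_p (a, b, (long) 0))  // flag only; the third
         handle_overflow ();                           // argument fixes the type
*/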
8769 /* Fold a call to __builtin_FILE to a constant string. */
8771 static inline tree
8772 fold_builtin_FILE (location_t loc)
8774 if (const char *fname = LOCATION_FILE (loc))
8776 /* The documentation says this builtin is equivalent to the preprocessor
8777 __FILE__ macro, so it appears appropriate to use the same file prefix
8778 mappings. */
8779 fname = remap_macro_filename (fname);
8780 return build_string_literal (strlen (fname) + 1, fname);
8783 return build_string_literal (1, "");
8786 /* Fold a call to __builtin_FUNCTION to a constant string. */
8788 static inline tree
8789 fold_builtin_FUNCTION ()
8791 const char *name = "";
8793 if (current_function_decl)
8794 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8796 return build_string_literal (strlen (name) + 1, name);
8799 /* Fold a call to __builtin_LINE to an integer constant. */
8801 static inline tree
8802 fold_builtin_LINE (location_t loc, tree type)
8804 return build_int_cst (type, LOCATION_LINE (loc));
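/* Usage sketch (editorial, not part of the GCC sources): unlike the
   __FILE__, __LINE__ and __func__ forms, these built-ins are ordinary
   calls that fold at the location where they end up, which makes them
   usable as C++ default arguments:

       #include <stdio.h>

       static void where (void)
       {
         printf ("%s:%d: %s\n", __builtin_FILE (), __builtin_LINE (),
                 __builtin_FUNCTION ());
       }
*/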
8807 /* Fold a call to built-in function FNDECL with 0 arguments.
8808 This function returns NULL_TREE if no simplification was possible. */
8810 static tree
8811 fold_builtin_0 (location_t loc, tree fndecl)
8813 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8814 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8815 switch (fcode)
8817 case BUILT_IN_FILE:
8818 return fold_builtin_FILE (loc);
8820 case BUILT_IN_FUNCTION:
8821 return fold_builtin_FUNCTION ();
8823 case BUILT_IN_LINE:
8824 return fold_builtin_LINE (loc, type);
8826 CASE_FLT_FN (BUILT_IN_INF):
8827 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8828 case BUILT_IN_INFD32:
8829 case BUILT_IN_INFD64:
8830 case BUILT_IN_INFD128:
8831 return fold_builtin_inf (loc, type, true);
8833 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8834 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8835 return fold_builtin_inf (loc, type, false);
8837 case BUILT_IN_CLASSIFY_TYPE:
8838 return fold_builtin_classify_type (NULL_TREE);
8840 default:
8841 break;
8843 return NULL_TREE;
8846 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8847 This function returns NULL_TREE if no simplification was possible. */
8849 static tree
8850 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8852 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8853 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8855 if (TREE_CODE (arg0) == ERROR_MARK)
8856 return NULL_TREE;
8858 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8859 return ret;
8861 switch (fcode)
8863 case BUILT_IN_CONSTANT_P:
8865 tree val = fold_builtin_constant_p (arg0);
8867 /* Gimplification will pull the CALL_EXPR for the builtin out of
8868 an if condition. When not optimizing, we'll not CSE it back.
8869 To avoid regressions such as link errors, return false now. */
8870 if (!val && !optimize)
8871 val = integer_zero_node;
8873 return val;
8876 case BUILT_IN_CLASSIFY_TYPE:
8877 return fold_builtin_classify_type (arg0);
8879 case BUILT_IN_STRLEN:
8880 return fold_builtin_strlen (loc, type, arg0);
8882 CASE_FLT_FN (BUILT_IN_FABS):
8883 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8884 case BUILT_IN_FABSD32:
8885 case BUILT_IN_FABSD64:
8886 case BUILT_IN_FABSD128:
8887 return fold_builtin_fabs (loc, arg0, type);
8889 case BUILT_IN_ABS:
8890 case BUILT_IN_LABS:
8891 case BUILT_IN_LLABS:
8892 case BUILT_IN_IMAXABS:
8893 return fold_builtin_abs (loc, arg0, type);
8895 CASE_FLT_FN (BUILT_IN_CONJ):
8896 if (validate_arg (arg0, COMPLEX_TYPE)
8897 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8898 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8899 break;
8901 CASE_FLT_FN (BUILT_IN_CREAL):
8902 if (validate_arg (arg0, COMPLEX_TYPE)
8903 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8904 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8905 break;
8907 CASE_FLT_FN (BUILT_IN_CIMAG):
8908 if (validate_arg (arg0, COMPLEX_TYPE)
8909 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8910 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8911 break;
8913 CASE_FLT_FN (BUILT_IN_CARG):
8914 return fold_builtin_carg (loc, arg0, type);
8916 case BUILT_IN_ISASCII:
8917 return fold_builtin_isascii (loc, arg0);
8919 case BUILT_IN_TOASCII:
8920 return fold_builtin_toascii (loc, arg0);
8922 case BUILT_IN_ISDIGIT:
8923 return fold_builtin_isdigit (loc, arg0);
8925 CASE_FLT_FN (BUILT_IN_FINITE):
8926 case BUILT_IN_FINITED32:
8927 case BUILT_IN_FINITED64:
8928 case BUILT_IN_FINITED128:
8929 case BUILT_IN_ISFINITE:
8931 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8932 if (ret)
8933 return ret;
8934 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8937 CASE_FLT_FN (BUILT_IN_ISINF):
8938 case BUILT_IN_ISINFD32:
8939 case BUILT_IN_ISINFD64:
8940 case BUILT_IN_ISINFD128:
8942 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8943 if (ret)
8944 return ret;
8945 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8948 case BUILT_IN_ISNORMAL:
8949 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8951 case BUILT_IN_ISINF_SIGN:
8952 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8954 CASE_FLT_FN (BUILT_IN_ISNAN):
8955 case BUILT_IN_ISNAND32:
8956 case BUILT_IN_ISNAND64:
8957 case BUILT_IN_ISNAND128:
8958 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8960 case BUILT_IN_FREE:
8961 if (integer_zerop (arg0))
8962 return build_empty_stmt (loc);
8963 break;
8965 default:
8966 break;
8969 return NULL_TREE;
8973 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8974 This function returns NULL_TREE if no simplification was possible. */
8976 static tree
8977 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8979 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8980 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8982 if (TREE_CODE (arg0) == ERROR_MARK
8983 || TREE_CODE (arg1) == ERROR_MARK)
8984 return NULL_TREE;
8986 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8987 return ret;
8989 switch (fcode)
8991 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8992 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8993 if (validate_arg (arg0, REAL_TYPE)
8994 && validate_arg (arg1, POINTER_TYPE))
8995 return do_mpfr_lgamma_r (arg0, arg1, type);
8996 break;
8998 CASE_FLT_FN (BUILT_IN_FREXP):
8999 return fold_builtin_frexp (loc, arg0, arg1, type);
9001 CASE_FLT_FN (BUILT_IN_MODF):
9002 return fold_builtin_modf (loc, arg0, arg1, type);
9004 case BUILT_IN_STRSPN:
9005 return fold_builtin_strspn (loc, arg0, arg1);
9007 case BUILT_IN_STRCSPN:
9008 return fold_builtin_strcspn (loc, arg0, arg1);
9010 case BUILT_IN_STRPBRK:
9011 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9013 case BUILT_IN_EXPECT:
9014 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9016 case BUILT_IN_ISGREATER:
9017 return fold_builtin_unordered_cmp (loc, fndecl,
9018 arg0, arg1, UNLE_EXPR, LE_EXPR);
9019 case BUILT_IN_ISGREATEREQUAL:
9020 return fold_builtin_unordered_cmp (loc, fndecl,
9021 arg0, arg1, UNLT_EXPR, LT_EXPR);
9022 case BUILT_IN_ISLESS:
9023 return fold_builtin_unordered_cmp (loc, fndecl,
9024 arg0, arg1, UNGE_EXPR, GE_EXPR);
9025 case BUILT_IN_ISLESSEQUAL:
9026 return fold_builtin_unordered_cmp (loc, fndecl,
9027 arg0, arg1, UNGT_EXPR, GT_EXPR);
9028 case BUILT_IN_ISLESSGREATER:
9029 return fold_builtin_unordered_cmp (loc, fndecl,
9030 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9031 case BUILT_IN_ISUNORDERED:
9032 return fold_builtin_unordered_cmp (loc, fndecl,
9033 arg0, arg1, UNORDERED_EXPR,
9034 NOP_EXPR);
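      /* Editorial note, not in the original source: each pair above
         maps a quiet comparison to the negation of its unordered code,
         e.g. isgreater (x, y) -> !(x UNLE y), which is true only for
         ordered operands with x > y and raises no FE_INVALID on quiet
         NaNs.  The second code of each pair is the plain comparison
         used when NaNs cannot occur (e.g. -ffinite-math-only).  */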
9036 /* We do the folding for va_start in the expander. */
9037 case BUILT_IN_VA_START:
9038 break;
9040 case BUILT_IN_OBJECT_SIZE:
9041 return fold_builtin_object_size (arg0, arg1);
9043 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9044 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9046 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9047 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9049 default:
9050 break;
9052 return NULL_TREE;
9055 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9056 and ARG2.
9057 This function returns NULL_TREE if no simplification was possible. */
9059 static tree
9060 fold_builtin_3 (location_t loc, tree fndecl,
9061 tree arg0, tree arg1, tree arg2)
9063 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9064 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9066 if (TREE_CODE (arg0) == ERROR_MARK
9067 || TREE_CODE (arg1) == ERROR_MARK
9068 || TREE_CODE (arg2) == ERROR_MARK)
9069 return NULL_TREE;
9071 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9072 arg0, arg1, arg2))
9073 return ret;
9075 switch (fcode)
9078 CASE_FLT_FN (BUILT_IN_SINCOS):
9079 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9081 CASE_FLT_FN (BUILT_IN_REMQUO):
9082 if (validate_arg (arg0, REAL_TYPE)
9083 && validate_arg (arg1, REAL_TYPE)
9084 && validate_arg (arg2, POINTER_TYPE))
9085 return do_mpfr_remquo (arg0, arg1, arg2);
9086 break;
9088 case BUILT_IN_MEMCMP:
9089 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9091 case BUILT_IN_EXPECT:
9092 return fold_builtin_expect (loc, arg0, arg1, arg2);
9094 case BUILT_IN_ADD_OVERFLOW:
9095 case BUILT_IN_SUB_OVERFLOW:
9096 case BUILT_IN_MUL_OVERFLOW:
9097 case BUILT_IN_ADD_OVERFLOW_P:
9098 case BUILT_IN_SUB_OVERFLOW_P:
9099 case BUILT_IN_MUL_OVERFLOW_P:
9100 case BUILT_IN_SADD_OVERFLOW:
9101 case BUILT_IN_SADDL_OVERFLOW:
9102 case BUILT_IN_SADDLL_OVERFLOW:
9103 case BUILT_IN_SSUB_OVERFLOW:
9104 case BUILT_IN_SSUBL_OVERFLOW:
9105 case BUILT_IN_SSUBLL_OVERFLOW:
9106 case BUILT_IN_SMUL_OVERFLOW:
9107 case BUILT_IN_SMULL_OVERFLOW:
9108 case BUILT_IN_SMULLL_OVERFLOW:
9109 case BUILT_IN_UADD_OVERFLOW:
9110 case BUILT_IN_UADDL_OVERFLOW:
9111 case BUILT_IN_UADDLL_OVERFLOW:
9112 case BUILT_IN_USUB_OVERFLOW:
9113 case BUILT_IN_USUBL_OVERFLOW:
9114 case BUILT_IN_USUBLL_OVERFLOW:
9115 case BUILT_IN_UMUL_OVERFLOW:
9116 case BUILT_IN_UMULL_OVERFLOW:
9117 case BUILT_IN_UMULLL_OVERFLOW:
9118 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9120 default:
9121 break;
9123 return NULL_TREE;
9126 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9127 arguments. IGNORE is true if the result of the
9128 function call is ignored. This function returns NULL_TREE if no
9129 simplification was possible. */
9131 tree
9132 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9134 tree ret = NULL_TREE;
9136 switch (nargs)
9138 case 0:
9139 ret = fold_builtin_0 (loc, fndecl);
9140 break;
9141 case 1:
9142 ret = fold_builtin_1 (loc, fndecl, args[0]);
9143 break;
9144 case 2:
9145 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9146 break;
9147 case 3:
9148 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9149 break;
9150 default:
9151 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9152 break;
9154 if (ret)
9156 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9157 SET_EXPR_LOCATION (ret, loc);
9158 TREE_NO_WARNING (ret) = 1;
9159 return ret;
9161 return NULL_TREE;
9164 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9165 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9166 of arguments in ARGS to be omitted. OLDNARGS is the number of
9167 elements in ARGS. */
9169 static tree
9170 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9171 int skip, tree fndecl, int n, va_list newargs)
9173 int nargs = oldnargs - skip + n;
9174 tree *buffer;
9176 if (n > 0)
9178 int i, j;
9180 buffer = XALLOCAVEC (tree, nargs);
9181 for (i = 0; i < n; i++)
9182 buffer[i] = va_arg (newargs, tree);
9183 for (j = skip; j < oldnargs; j++, i++)
9184 buffer[i] = args[j];
9186 else
9187 buffer = args + skip;
9189 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9192 /* Return true if FNDECL shouldn't be folded right now.
9193 If a built-in function has an inline attribute always_inline
9194 wrapper, defer folding it until after always_inline functions have
9195 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9196 might not be performed. */
9198 bool
9199 avoid_folding_inline_builtin (tree fndecl)
9201 return (DECL_DECLARED_INLINE_P (fndecl)
9202 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9203 && cfun
9204 && !cfun->always_inline_functions_inlined
9205 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
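
/* Editorial sketch, not part of the original file: the glibc-style
   _FORTIFY_SOURCE wrapper the predicate above protects.  Names are
   illustrative, not glibc's exact ones.  */
extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) void *
my_memcpy (void *dst, const void *src, __SIZE_TYPE__ n)
{
  /* If the checking builtin were folded before this wrapper is inlined
     into its callers, the object-size check could be lost; hence
     folding is deferred until always_inline expansion is complete.  */
  return __builtin___memcpy_chk (dst, src, n,
                                 __builtin_object_size (dst, 0));
}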
9208 /* A wrapper function for builtin folding that prevents warnings for
9209 "statement without effect" and the like, caused by removing the
9210 call node earlier than the warning is generated. */
9212 tree
9213 fold_call_expr (location_t loc, tree exp, bool ignore)
9215 tree ret = NULL_TREE;
9216 tree fndecl = get_callee_fndecl (exp);
9217 if (fndecl
9218 && TREE_CODE (fndecl) == FUNCTION_DECL
9219 && DECL_BUILT_IN (fndecl)
9220 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9221 yet. Defer folding until we see all the arguments
9222 (after inlining). */
9223 && !CALL_EXPR_VA_ARG_PACK (exp))
9225 int nargs = call_expr_nargs (exp);
9227 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9228 instead last argument is __builtin_va_arg_pack (). Defer folding
9229 even in that case, until arguments are finalized. */
9230 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9232 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9233 if (fndecl2
9234 && TREE_CODE (fndecl2) == FUNCTION_DECL
9235 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9236 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9237 return NULL_TREE;
9240 if (avoid_folding_inline_builtin (fndecl))
9241 return NULL_TREE;
9243 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9244 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9245 CALL_EXPR_ARGP (exp), ignore);
9246 else
9248 tree *args = CALL_EXPR_ARGP (exp);
9249 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9250 if (ret)
9251 return ret;
9254 return NULL_TREE;
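
/* Editorial sketch, not part of the original file: the call shape that
   the va_arg_pack checks above and in fold_builtin_call_array below
   look for.  Names are illustrative.  */
extern int my_fn_full (int flag, const char *fmt, ...);

extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) int
my_fn (const char *fmt, ...)
{
  /* Until this wrapper is inlined into a real call site, the caller's
     "..." is represented only by this trailing __builtin_va_arg_pack ()
     call, so any folding here would see an incomplete argument list.  */
  return my_fn_full (0, fmt, __builtin_va_arg_pack ());
}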
9257 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9258 N arguments are passed in the array ARGARRAY. Return a folded
9259 expression or NULL_TREE if no simplification was possible. */
9261 tree
9262 fold_builtin_call_array (location_t loc, tree,
9263 tree fn,
9264 int n,
9265 tree *argarray)
9267 if (TREE_CODE (fn) != ADDR_EXPR)
9268 return NULL_TREE;
9270 tree fndecl = TREE_OPERAND (fn, 0);
9271 if (TREE_CODE (fndecl) == FUNCTION_DECL
9272 && DECL_BUILT_IN (fndecl))
9274 /* If last argument is __builtin_va_arg_pack (), arguments to this
9275 function are not finalized yet. Defer folding until they are. */
9276 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9278 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9279 if (fndecl2
9280 && TREE_CODE (fndecl2) == FUNCTION_DECL
9281 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9282 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9283 return NULL_TREE;
9285 if (avoid_folding_inline_builtin (fndecl))
9286 return NULL_TREE;
9287 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9288 return targetm.fold_builtin (fndecl, n, argarray, false);
9289 else
9290 return fold_builtin_n (loc, fndecl, argarray, n, false);
9293 return NULL_TREE;
9296 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9297 along with N new arguments specified as the "..." parameters. SKIP
9298 is the number of arguments in EXP to be omitted. This function is used
9299 to do varargs-to-varargs transformations. */
9301 static tree
9302 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9304 va_list ap;
9305 tree t;
9307 va_start (ap, n);
9308 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9309 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9310 va_end (ap);
9312 return t;
9315 /* Validate a single argument ARG against a tree code CODE representing
9316 a type. Return true when argument is valid. */
9318 static bool
9319 validate_arg (const_tree arg, enum tree_code code)
9321 if (!arg)
9322 return false;
9323 else if (code == POINTER_TYPE)
9324 return POINTER_TYPE_P (TREE_TYPE (arg));
9325 else if (code == INTEGER_TYPE)
9326 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9327 return code == TREE_CODE (TREE_TYPE (arg));
9330 /* This function validates the types of a function call argument list
9331 against a specified list of tree_codes. If the last specifier is a 0,
9332 that represents an ellipsis; otherwise the last specifier must be a
9333 VOID_TYPE.
9335 This is the GIMPLE version of validate_arglist. Eventually we want to
9336 completely convert builtins.c to work from GIMPLEs and the tree based
9337 validate_arglist will then be removed. */
9339 bool
9340 validate_gimple_arglist (const gcall *call, ...)
9342 enum tree_code code;
9343 bool res = false;
9344 va_list ap;
9345 const_tree arg;
9346 size_t i;
9348 va_start (ap, call);
9349 i = 0;
9351 do
9353 code = (enum tree_code) va_arg (ap, int);
9354 switch (code)
9356 case 0:
9357 /* This signifies an ellipsis; any further arguments are all ok. */
9358 res = true;
9359 goto end;
9360 case VOID_TYPE:
9361 /* This signifies an endlink, if no arguments remain, return
9362 true, otherwise return false. */
9363 res = (i == gimple_call_num_args (call));
9364 goto end;
9365 default:
9366 /* If no parameters remain or the parameter's code does not
9367 match the specified code, return false. Otherwise continue
9368 checking any remaining arguments. */
9369 arg = gimple_call_arg (call, i++);
9370 if (!validate_arg (arg, code))
9371 goto end;
9372 break;
9375 while (1);
9377 /* We need gotos here since we can only have one VA_CLOSE in a
9378 function. */
9379 end: ;
9380 va_end (ap);
9382 return res;
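
/* Editorial note, not in the original source: a typical use of the
   protocol above, e.g.

     validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE,
                              POINTER_TYPE, VOID_TYPE)

   accepts exactly (real, real, pointer), as for remquo; writing a
   trailing 0 instead of VOID_TYPE would accept any further arguments,
   printf-style.  */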
9385 /* Default target-specific builtin expander that does nothing. */
9387 rtx
9388 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9389 rtx target ATTRIBUTE_UNUSED,
9390 rtx subtarget ATTRIBUTE_UNUSED,
9391 machine_mode mode ATTRIBUTE_UNUSED,
9392 int ignore ATTRIBUTE_UNUSED)
9394 return NULL_RTX;
9397 /* Returns true if EXP represents data that would potentially reside
9398 in a readonly section. */
9400 bool
9401 readonly_data_expr (tree exp)
9403 STRIP_NOPS (exp);
9405 if (TREE_CODE (exp) != ADDR_EXPR)
9406 return false;
9408 exp = get_base_address (TREE_OPERAND (exp, 0));
9409 if (!exp)
9410 return false;
9412 /* Make sure we call decl_readonly_section only for trees it
9413 can handle (since it returns true for everything it doesn't
9414 understand). */
9415 if (TREE_CODE (exp) == STRING_CST
9416 || TREE_CODE (exp) == CONSTRUCTOR
9417 || (VAR_P (exp) && TREE_STATIC (exp)))
9418 return decl_readonly_section (exp, 0);
9419 else
9420 return false;
9423 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9424 to the call, and TYPE is its return type.
9426 Return NULL_TREE if no simplification was possible, otherwise return the
9427 simplified form of the call as a tree.
9429 The simplified form may be a constant or other expression which
9430 computes the same value, but in a more efficient manner (including
9431 calls to other builtin functions).
9433 The call may contain arguments which need to be evaluated, but
9434 which are not useful to determine the result of the call. In
9435 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9436 COMPOUND_EXPR will be an argument which must be evaluated.
9437 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9438 COMPOUND_EXPR in the chain will contain the tree for the simplified
9439 form of the builtin function call. */
9441 static tree
9442 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9444 if (!validate_arg (s1, POINTER_TYPE)
9445 || !validate_arg (s2, POINTER_TYPE))
9446 return NULL_TREE;
9447 else
9449 tree fn;
9450 const char *p1, *p2;
9452 p2 = c_getstr (s2);
9453 if (p2 == NULL)
9454 return NULL_TREE;
9456 p1 = c_getstr (s1);
9457 if (p1 != NULL)
9459 const char *r = strpbrk (p1, p2);
9460 tree tem;
9462 if (r == NULL)
9463 return build_int_cst (TREE_TYPE (s1), 0);
9465 /* Return an offset into the constant string argument. */
9466 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9467 return fold_convert_loc (loc, type, tem);
9470 if (p2[0] == '\0')
9471 /* strpbrk(x, "") == NULL.
9472 Evaluate and ignore s1 in case it had side-effects. */
9473 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9475 if (p2[1] != '\0')
9476 return NULL_TREE; /* Really call strpbrk. */
9478 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9479 if (!fn)
9480 return NULL_TREE;
9482 /* New argument list transforming strpbrk(s1, s2) to
9483 strchr(s1, s2[0]). */
9484 return build_call_expr_loc (loc, fn, 2, s1,
9485 build_int_cst (integer_type_node, p2[0]));
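
/* Editorial example, not part of the original file: the three outcomes
   of the strpbrk folding above, from user code.  */
#include <string.h>

static const char *
strpbrk_cases (const char *s)
{
  const char *a = strpbrk ("abcde", "dx"); /* constant fold: "abcde" + 3 */
  const char *b = strpbrk (s, "");         /* folds to NULL; s is still
                                              evaluated */
  const char *c = strpbrk (s, "/");        /* folds to strchr (s, '/') */
  return (a != 0 && b == 0) ? c : 0;
}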
9489 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9490 to the call.
9492 Return NULL_TREE if no simplification was possible, otherwise return the
9493 simplified form of the call as a tree.
9495 The simplified form may be a constant or other expression which
9496 computes the same value, but in a more efficient manner (including
9497 calls to other builtin functions).
9499 The call may contain arguments which need to be evaluated, but
9500 which are not useful to determine the result of the call. In
9501 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9502 COMPOUND_EXPR will be an argument which must be evaluated.
9503 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9504 COMPOUND_EXPR in the chain will contain the tree for the simplified
9505 form of the builtin function call. */
9507 static tree
9508 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9510 if (!validate_arg (s1, POINTER_TYPE)
9511 || !validate_arg (s2, POINTER_TYPE))
9512 return NULL_TREE;
9513 else
9515 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9517 /* If either argument is "", the result is 0. */
9518 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9519 /* Evaluate and ignore both arguments in case either one has
9520 side-effects. */
9521 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9522 s1, s2);
9523 return NULL_TREE;
9527 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9528 to the call.
9530 Return NULL_TREE if no simplification was possible, otherwise return the
9531 simplified form of the call as a tree.
9533 The simplified form may be a constant or other expression which
9534 computes the same value, but in a more efficient manner (including
9535 calls to other builtin functions).
9537 The call may contain arguments which need to be evaluated, but
9538 which are not useful to determine the result of the call. In
9539 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9540 COMPOUND_EXPR will be an argument which must be evaluated.
9541 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9542 COMPOUND_EXPR in the chain will contain the tree for the simplified
9543 form of the builtin function call. */
9545 static tree
9546 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9548 if (!validate_arg (s1, POINTER_TYPE)
9549 || !validate_arg (s2, POINTER_TYPE))
9550 return NULL_TREE;
9551 else
9553 /* If the first argument is "", the result is 0. */
9554 const char *p1 = c_getstr (s1);
9555 if (p1 && *p1 == '\0')
9557 /* Evaluate and ignore argument s2 in case it has
9558 side-effects. */
9559 return omit_one_operand_loc (loc, size_type_node,
9560 size_zero_node, s2);
9563 /* If the second argument is "", return __builtin_strlen(s1). */
9564 const char *p2 = c_getstr (s2);
9565 if (p2 && *p2 == '\0')
9567 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9569 /* If the replacement _DECL isn't initialized, don't do the
9570 transformation. */
9571 if (!fn)
9572 return NULL_TREE;
9574 return build_call_expr_loc (loc, fn, 1, s1);
9576 return NULL_TREE;
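
/* Editorial example, not part of the original file: the special cases
   folded by fold_builtin_strspn and fold_builtin_strcspn above.  */
#include <string.h>

static size_t
span_folds (const char *s, const char *set)
{
  size_t a = strspn ("", set);   /* folds to 0; SET still evaluated */
  size_t b = strcspn ("", set);  /* likewise 0 */
  size_t c = strcspn (s, "");    /* folds to strlen (s) */
  return a + b + c;
}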
9580 /* Fold the next_arg or va_start call EXP. Returns true if an error
9581 was produced, false otherwise. This is done so that we don't output
9582 the error or warning more than once. */
9584 bool
9585 fold_builtin_next_arg (tree exp, bool va_start_p)
9587 tree fntype = TREE_TYPE (current_function_decl);
9588 int nargs = call_expr_nargs (exp);
9589 tree arg;
9590 /* There is a good chance the current input_location points inside the
9591 definition of the va_start macro (perhaps on the token for
9592 builtin) in a system header, so warnings will not be emitted.
9593 Use the location in real source code. */
9594 source_location current_location =
9595 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9596 NULL);
9598 if (!stdarg_p (fntype))
9600 error ("%<va_start%> used in function with fixed args");
9601 return true;
9604 if (va_start_p)
9606 if (nargs != 2)
9608 error ("wrong number of arguments to function %<va_start%>");
9609 return true;
9611 arg = CALL_EXPR_ARG (exp, 1);
9613 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9614 once we have checked the arguments and, if needed, issued a warning. */
9615 else
9617 if (nargs == 0)
9619 /* Evidently an out of date version of <stdarg.h>; can't validate
9620 va_start's second argument, but can still work as intended. */
9621 warning_at (current_location,
9622 OPT_Wvarargs,
9623 "%<__builtin_next_arg%> called without an argument");
9624 return true;
9626 else if (nargs > 1)
9628 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9629 return true;
9631 arg = CALL_EXPR_ARG (exp, 0);
9634 if (TREE_CODE (arg) == SSA_NAME)
9635 arg = SSA_NAME_VAR (arg);
9637 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9638 or __builtin_next_arg (0) the first time we see it, after checking
9639 the arguments and if needed issuing a warning. */
9640 if (!integer_zerop (arg))
9642 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9644 /* Strip off all nops for the sake of the comparison. This
9645 is not quite the same as STRIP_NOPS. It does more.
9646 We must also strip off INDIRECT_REF for C++ reference
9647 parameters. */
9648 while (CONVERT_EXPR_P (arg)
9649 || TREE_CODE (arg) == INDIRECT_REF)
9650 arg = TREE_OPERAND (arg, 0);
9651 if (arg != last_parm)
9653 /* FIXME: Sometimes with the tree optimizers we can get something
9654 other than the last argument even though the user used the last
9655 argument. We just warn and proceed as if the last argument had
9656 been used, so we can get wrong code because of
9657 it. */
9658 warning_at (current_location,
9659 OPT_Wvarargs,
9660 "second parameter of %<va_start%> not last named argument");
9663 /* Undefined by C99 7.15.1.4p4 (va_start):
9664 "If the parameter parmN is declared with the register storage
9665 class, with a function or array type, or with a type that is
9666 not compatible with the type that results after application of
9667 the default argument promotions, the behavior is undefined."
9669 else if (DECL_REGISTER (arg))
9671 warning_at (current_location,
9672 OPT_Wvarargs,
9673 "undefined behavior when second parameter of "
9674 "%<va_start%> is declared with %<register%> storage");
9677 /* We want to verify the second parameter just once before the tree
9678 optimizers are run and then avoid keeping it in the tree,
9679 as otherwise we could warn even for correct code like:
9680 void foo (int i, ...)
9681 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9682 if (va_start_p)
9683 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9684 else
9685 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9687 return false;
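
/* Editorial example, not part of the original file: the va_start
   shapes the checks above accept or diagnose.  */
#include <stdarg.h>

static int
sum (int n, ...)
{
  va_list ap;
  int i, s = 0;
  va_start (ap, n);             /* OK: N is the last named parameter */
  for (i = 0; i < n; i++)
    s += va_arg (ap, int);
  va_end (ap);
  return s;
}

/* By contrast, va_start in a function with fixed arguments is an
   error, and naming a parameter other than the last one draws the
   -Wvarargs warning issued above.  */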
9691 /* Expand a call EXP to __builtin_object_size. */
9693 static rtx
9694 expand_builtin_object_size (tree exp)
9696 tree ost;
9697 int object_size_type;
9698 tree fndecl = get_callee_fndecl (exp);
9700 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9702 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9703 exp, fndecl);
9704 expand_builtin_trap ();
9705 return const0_rtx;
9708 ost = CALL_EXPR_ARG (exp, 1);
9709 STRIP_NOPS (ost);
9711 if (TREE_CODE (ost) != INTEGER_CST
9712 || tree_int_cst_sgn (ost) < 0
9713 || compare_tree_int (ost, 3) > 0)
9715 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9716 exp, fndecl);
9717 expand_builtin_trap ();
9718 return const0_rtx;
9721 object_size_type = tree_to_shwi (ost);
9723 return object_size_type < 2 ? constm1_rtx : const0_rtx;
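
/* Editorial example, not part of the original file: the defaults
   chosen above when no size information survives to expansion.  */
static void
bos_defaults (char *p)
{
  /* Types 0 and 1 are maximum estimates, so "unknown" degrades to
     (size_t) -1; types 2 and 3 are minimum estimates, so "unknown"
     degrades to 0.  */
  __SIZE_TYPE__ max0 = __builtin_object_size (p, 0);  /* (size_t) -1 */
  __SIZE_TYPE__ min2 = __builtin_object_size (p, 2);  /* 0 */
  (void) max0; (void) min2;
}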
9726 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9727 FCODE is the BUILT_IN_* to use.
9728 Return NULL_RTX if we failed; the caller should emit a normal call,
9729 otherwise try to get the result in TARGET, if convenient (and in
9730 mode MODE if that's convenient). */
9732 static rtx
9733 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9734 enum built_in_function fcode)
9736 if (!validate_arglist (exp,
9737 POINTER_TYPE,
9738 fcode == BUILT_IN_MEMSET_CHK
9739 ? INTEGER_TYPE : POINTER_TYPE,
9740 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9741 return NULL_RTX;
9743 tree dest = CALL_EXPR_ARG (exp, 0);
9744 tree src = CALL_EXPR_ARG (exp, 1);
9745 tree len = CALL_EXPR_ARG (exp, 2);
9746 tree size = CALL_EXPR_ARG (exp, 3);
9748 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9749 /*str=*/NULL_TREE, size);
9751 if (!tree_fits_uhwi_p (size))
9752 return NULL_RTX;
9754 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9756 /* Avoid transforming the checking call to an ordinary one when
9757 an overflow has been detected or when the call couldn't be
9758 validated because the size is not constant. */
9759 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9760 return NULL_RTX;
9762 tree fn = NULL_TREE;
9763 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9764 mem{cpy,pcpy,move,set} is available. */
9765 switch (fcode)
9767 case BUILT_IN_MEMCPY_CHK:
9768 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9769 break;
9770 case BUILT_IN_MEMPCPY_CHK:
9771 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9772 break;
9773 case BUILT_IN_MEMMOVE_CHK:
9774 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9775 break;
9776 case BUILT_IN_MEMSET_CHK:
9777 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9778 break;
9779 default:
9780 break;
9783 if (! fn)
9784 return NULL_RTX;
9786 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9787 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9788 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9789 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9791 else if (fcode == BUILT_IN_MEMSET_CHK)
9792 return NULL_RTX;
9793 else
9795 unsigned int dest_align = get_pointer_alignment (dest);
9797 /* If DEST is not a pointer type, call the normal function. */
9798 if (dest_align == 0)
9799 return NULL_RTX;
9801 /* If SRC and DEST are the same (and not volatile), do nothing. */
9802 if (operand_equal_p (src, dest, 0))
9804 tree expr;
9806 if (fcode != BUILT_IN_MEMPCPY_CHK)
9808 /* Evaluate and ignore LEN in case it has side-effects. */
9809 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9810 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9813 expr = fold_build_pointer_plus (dest, len);
9814 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9817 /* __memmove_chk special case. */
9818 if (fcode == BUILT_IN_MEMMOVE_CHK)
9820 unsigned int src_align = get_pointer_alignment (src);
9822 if (src_align == 0)
9823 return NULL_RTX;
9825 /* If src is categorized for a readonly section we can use
9826 normal __memcpy_chk. */
9827 if (readonly_data_expr (src))
9829 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9830 if (!fn)
9831 return NULL_RTX;
9832 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9833 dest, src, len, size);
9834 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9835 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9836 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9839 return NULL_RTX;
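
/* Editorial example, not part of the original file: how the expansion
   above lowers a checking call.  */
static void
chk_lowering (char *dst, const char *src)
{
  /* Constant length 8 does not exceed the known object size 16, so the
     call is expanded as a plain memcpy.  If the length provably
     exceeded the size, NULL_RTX would be returned and the library
     __memcpy_chk (which in glibc aborts via __chk_fail) would be
     called instead.  */
  __builtin___memcpy_chk (dst, src, 8, 16);
}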
9843 /* Emit warning if a buffer overflow is detected at compile time. */
9845 static void
9846 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9848 /* The source string. */
9849 tree srcstr = NULL_TREE;
9850 /* The size of the destination object. */
9851 tree objsize = NULL_TREE;
9852 /* The string that is being appended to (as in __strcat_chk),
9853 or null if there is none. */
9854 tree catstr = NULL_TREE;
9855 /* The maximum length of the source sequence in a bounded operation
9856 (such as __strncat_chk) or null if the operation isn't bounded
9857 (such as __strcat_chk). */
9858 tree maxread = NULL_TREE;
9859 /* The exact size of the access (such as in __strncpy_chk). */
9860 tree size = NULL_TREE;
9862 switch (fcode)
9864 case BUILT_IN_STRCPY_CHK:
9865 case BUILT_IN_STPCPY_CHK:
9866 srcstr = CALL_EXPR_ARG (exp, 1);
9867 objsize = CALL_EXPR_ARG (exp, 2);
9868 break;
9870 case BUILT_IN_STRCAT_CHK:
9871 /* For __strcat_chk the warning will be emitted only if overflowing
9872 by at least strlen (dest) + 1 bytes. */
9873 catstr = CALL_EXPR_ARG (exp, 0);
9874 srcstr = CALL_EXPR_ARG (exp, 1);
9875 objsize = CALL_EXPR_ARG (exp, 2);
9876 break;
9878 case BUILT_IN_STRNCAT_CHK:
9879 catstr = CALL_EXPR_ARG (exp, 0);
9880 srcstr = CALL_EXPR_ARG (exp, 1);
9881 maxread = CALL_EXPR_ARG (exp, 2);
9882 objsize = CALL_EXPR_ARG (exp, 3);
9883 break;
9885 case BUILT_IN_STRNCPY_CHK:
9886 case BUILT_IN_STPNCPY_CHK:
9887 srcstr = CALL_EXPR_ARG (exp, 1);
9888 size = CALL_EXPR_ARG (exp, 2);
9889 objsize = CALL_EXPR_ARG (exp, 3);
9890 break;
9892 case BUILT_IN_SNPRINTF_CHK:
9893 case BUILT_IN_VSNPRINTF_CHK:
9894 maxread = CALL_EXPR_ARG (exp, 1);
9895 objsize = CALL_EXPR_ARG (exp, 3);
9896 break;
9897 default:
9898 gcc_unreachable ();
9901 if (catstr && maxread)
9903 /* Check __strncat_chk. There is no way to determine the length
9904 of the string to which the source string is being appended so
9905 just warn when the length of the source string is not known. */
9906 check_strncat_sizes (exp, objsize);
9907 return;
9910 /* The destination argument is the first one for all built-ins above. */
9911 tree dst = CALL_EXPR_ARG (exp, 0);
9913 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
9916 /* Emit warning if a buffer overflow is detected at compile time
9917 in __sprintf_chk/__vsprintf_chk calls. */
9919 static void
9920 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9922 tree size, len, fmt;
9923 const char *fmt_str;
9924 int nargs = call_expr_nargs (exp);
9926 /* Verify the required arguments in the original call. */
9928 if (nargs < 4)
9929 return;
9930 size = CALL_EXPR_ARG (exp, 2);
9931 fmt = CALL_EXPR_ARG (exp, 3);
9933 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9934 return;
9936 /* Check whether the format is a literal string constant. */
9937 fmt_str = c_getstr (fmt);
9938 if (fmt_str == NULL)
9939 return;
9941 if (!init_target_chars ())
9942 return;
9944 /* If the format doesn't contain % args or %%, we know its size. */
9945 if (strchr (fmt_str, target_percent) == 0)
9946 len = build_int_cstu (size_type_node, strlen (fmt_str));
9947 /* If the format is "%s" and first ... argument is a string literal,
9948 we know it too. */
9949 else if (fcode == BUILT_IN_SPRINTF_CHK
9950 && strcmp (fmt_str, target_percent_s) == 0)
9952 tree arg;
9954 if (nargs < 5)
9955 return;
9956 arg = CALL_EXPR_ARG (exp, 4);
9957 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9958 return;
9960 len = c_strlen (arg, 1);
9961 if (!len || ! tree_fits_uhwi_p (len))
9962 return;
9964 else
9965 return;
9967 /* Add one for the terminating nul. */
9968 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9970 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
9971 /*maxread=*/NULL_TREE, len, size);
9974 /* Emit warning if a free is called with address of a variable. */
9976 static void
9977 maybe_emit_free_warning (tree exp)
9979 tree arg = CALL_EXPR_ARG (exp, 0);
9981 STRIP_NOPS (arg);
9982 if (TREE_CODE (arg) != ADDR_EXPR)
9983 return;
9985 arg = get_base_address (TREE_OPERAND (arg, 0));
9986 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9987 return;
9989 if (SSA_VAR_P (arg))
9990 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9991 "%Kattempt to free a non-heap object %qD", exp, arg);
9992 else
9993 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9994 "%Kattempt to free a non-heap object", exp);
9997 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9998 if possible. */
10000 static tree
10001 fold_builtin_object_size (tree ptr, tree ost)
10003 unsigned HOST_WIDE_INT bytes;
10004 int object_size_type;
10006 if (!validate_arg (ptr, POINTER_TYPE)
10007 || !validate_arg (ost, INTEGER_TYPE))
10008 return NULL_TREE;
10010 STRIP_NOPS (ost);
10012 if (TREE_CODE (ost) != INTEGER_CST
10013 || tree_int_cst_sgn (ost) < 0
10014 || compare_tree_int (ost, 3) > 0)
10015 return NULL_TREE;
10017 object_size_type = tree_to_shwi (ost);
10019 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10020 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10021 and (size_t) 0 for types 2 and 3. */
10022 if (TREE_SIDE_EFFECTS (ptr))
10023 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10025 if (TREE_CODE (ptr) == ADDR_EXPR)
10027 compute_builtin_object_size (ptr, object_size_type, &bytes);
10028 if (wi::fits_to_tree_p (bytes, size_type_node))
10029 return build_int_cstu (size_type_node, bytes);
10031 else if (TREE_CODE (ptr) == SSA_NAME)
10033 /* If object size is not known yet, delay folding until
10034 later. Maybe subsequent passes will help determine
10035 it. */
10036 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10037 && wi::fits_to_tree_p (bytes, size_type_node))
10038 return build_int_cstu (size_type_node, bytes);
10041 return NULL_TREE;
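
/* Editorial sketch, not part of the original file: the side-effect
   rule applied above.  next_buf is a hypothetical function.  */
extern char *next_buf (void);  /* hypothetical */

static __SIZE_TYPE__
bos_side_effects (void)
{
  /* The argument is not evaluated, so a call with side effects cannot
     be traced to an object: types 0 and 1 immediately answer with the
     unknown maximum, types 2 and 3 with the unknown minimum.  */
  return __builtin_object_size (next_buf (), 0);  /* (size_t) -1 */
}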
10044 /* Builtins with folding operations that operate on "..." arguments
10045 need special handling; we need to store the arguments in a convenient
10046 data structure before attempting any folding. Fortunately there are
10047 only a few builtins that fall into this category. FNDECL is the
10048 function, EXP is the CALL_EXPR for the call. */
10050 static tree
10051 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10053 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10054 tree ret = NULL_TREE;
10056 switch (fcode)
10058 case BUILT_IN_FPCLASSIFY:
10059 ret = fold_builtin_fpclassify (loc, args, nargs);
10060 break;
10062 default:
10063 break;
10065 if (ret)
10067 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10068 SET_EXPR_LOCATION (ret, loc);
10069 TREE_NO_WARNING (ret) = 1;
10070 return ret;
10072 return NULL_TREE;
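
/* Editorial example, not part of the original file: why fpclassify
   lands in the varargs bucket above.  */
#include <math.h>

static int
classify (double x)
{
  /* The builtin takes five classification values before the operand,
     in the order NaN, infinite, normal, subnormal, zero; <math.h>'s
     fpclassify macro is typically defined this way.  */
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}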
10075 /* Initialize format string characters in the target charset. */
10077 bool
10078 init_target_chars (void)
10080 static bool init;
10081 if (!init)
10083 target_newline = lang_hooks.to_target_charset ('\n');
10084 target_percent = lang_hooks.to_target_charset ('%');
10085 target_c = lang_hooks.to_target_charset ('c');
10086 target_s = lang_hooks.to_target_charset ('s');
10087 if (target_newline == 0 || target_percent == 0 || target_c == 0
10088 || target_s == 0)
10089 return false;
10091 target_percent_c[0] = target_percent;
10092 target_percent_c[1] = target_c;
10093 target_percent_c[2] = '\0';
10095 target_percent_s[0] = target_percent;
10096 target_percent_s[1] = target_s;
10097 target_percent_s[2] = '\0';
10099 target_percent_s_newline[0] = target_percent;
10100 target_percent_s_newline[1] = target_s;
10101 target_percent_s_newline[2] = target_newline;
10102 target_percent_s_newline[3] = '\0';
10104 init = true;
10106 return true;
10109 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10110 and no overflow/underflow occurred. INEXACT is true if M was not
10111 exactly calculated. TYPE is the tree type for the result. This
10112 function assumes that you cleared the MPFR flags and then
10113 calculated M to see if anything subsequently set a flag prior to
10114 entering this function. Return NULL_TREE if any checks fail. */
10116 static tree
10117 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10119 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10120 overflow/underflow occurred. If -frounding-math, proceed iff the
10121 result of calling FUNC was exact. */
10122 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10123 && (!flag_rounding_math || !inexact))
10125 REAL_VALUE_TYPE rr;
10127 real_from_mpfr (&rr, m, type, GMP_RNDN);
10128 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10129 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10130 but the mpfr_t is not, then we underflowed in the
10131 conversion. */
10132 if (real_isfinite (&rr)
10133 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10135 REAL_VALUE_TYPE rmode;
10137 real_convert (&rmode, TYPE_MODE (type), &rr);
10138 /* Proceed iff the specified mode can hold the value. */
10139 if (real_identical (&rmode, &rr))
10140 return build_real (type, rmode);
10143 return NULL_TREE;
10146 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10147 number and no overflow/underflow occurred. INEXACT is true if M
10148 was not exactly calculated. TYPE is the tree type for the result.
10149 This function assumes that you cleared the MPFR flags and then
10150 calculated M to see if anything subsequently set a flag prior to
10151 entering this function. Return NULL_TREE if any checks fail, if
10152 FORCE_CONVERT is true, then bypass the checks. */
10154 static tree
10155 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10157 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10158 overflow/underflow occurred. If -frounding-math, proceed iff the
10159 result of calling FUNC was exact. */
10160 if (force_convert
10161 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10162 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10163 && (!flag_rounding_math || !inexact)))
10165 REAL_VALUE_TYPE re, im;
10167 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10168 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10169 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10170 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10171 but the mpfr_t is not, then we underflowed in the
10172 conversion. */
10173 if (force_convert
10174 || (real_isfinite (&re) && real_isfinite (&im)
10175 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10176 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10178 REAL_VALUE_TYPE re_mode, im_mode;
10180 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10181 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10182 /* Proceed iff the specified mode can hold the value. */
10183 if (force_convert
10184 || (real_identical (&re_mode, &re)
10185 && real_identical (&im_mode, &im)))
10186 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10187 build_real (TREE_TYPE (type), im_mode));
10190 return NULL_TREE;
10193 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10194 the pointer *(ARG_QUO) and return the result. The type is taken
10195 from the type of ARG0 and is used for setting the precision of the
10196 calculation and results. */
10198 static tree
10199 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10201 tree const type = TREE_TYPE (arg0);
10202 tree result = NULL_TREE;
10204 STRIP_NOPS (arg0);
10205 STRIP_NOPS (arg1);
10207 /* To proceed, MPFR must exactly represent the target floating point
10208 format, which only happens when the target base equals two. */
10209 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10210 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10211 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10213 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10214 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10216 if (real_isfinite (ra0) && real_isfinite (ra1))
10218 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10219 const int prec = fmt->p;
10220 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10221 tree result_rem;
10222 long integer_quo;
10223 mpfr_t m0, m1;
10225 mpfr_inits2 (prec, m0, m1, NULL);
10226 mpfr_from_real (m0, ra0, GMP_RNDN);
10227 mpfr_from_real (m1, ra1, GMP_RNDN);
10228 mpfr_clear_flags ();
10229 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10230 /* Remquo is independent of the rounding mode, so pass
10231 inexact=0 to do_mpfr_ckconv(). */
10232 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10233 mpfr_clears (m0, m1, NULL);
10234 if (result_rem)
10236 /* MPFR calculates quo in the host's long so it may
10237 return more bits in quo than the target int can hold
10238 if sizeof(host long) > sizeof(target int). This can
10239 happen even for native compilers in LP64 mode. In
10240 these cases, modulo the quo value with the largest
10241 number that the target int can hold while leaving one
10242 bit for the sign. */
10243 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10244 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10246 /* Dereference the quo pointer argument. */
10247 arg_quo = build_fold_indirect_ref (arg_quo);
10248 /* Proceed iff a valid pointer type was passed in. */
10249 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10251 /* Set the value. */
10252 tree result_quo
10253 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10254 build_int_cst (TREE_TYPE (arg_quo),
10255 integer_quo));
10256 TREE_SIDE_EFFECTS (result_quo) = 1;
10257 /* Combine the quo assignment with the rem. */
10258 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10259 result_quo, result_rem));
10264 return result;
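
/* Editorial example, not part of the original file: the constant fold
   do_mpfr_remquo above performs.  */
#include <math.h>

static double
remquo_fold (void)
{
  int q;
  /* With constant arguments MPFR computes both results at compile
     time; the folded tree is the COMPOUND_EXPR (*quo = 4, -1.0).  */
  double r = remquo (7.0, 2.0, &q);  /* r == -1.0, q == 4 */
  return r + q;
}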
10267 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10268 resulting value as a tree with type TYPE. The mpfr precision is
10269 set to the precision of TYPE. We assume that this mpfr function
10270 returns zero if the result could be calculated exactly within the
10271 requested precision. In addition, the integer pointer represented
10272 by ARG_SG will be dereferenced and set to the appropriate signgam
10273 (-1,1) value. */
10275 static tree
10276 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10278 tree result = NULL_TREE;
10280 STRIP_NOPS (arg);
10282 /* To proceed, MPFR must exactly represent the target floating point
10283 format, which only happens when the target base equals two. Also
10284 verify ARG is a constant and that ARG_SG is an int pointer. */
10285 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10286 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10287 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10288 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10290 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10292 /* In addition to NaN and Inf, the argument cannot be zero or a
10293 negative integer. */
10294 if (real_isfinite (ra)
10295 && ra->cl != rvc_zero
10296 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10298 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10299 const int prec = fmt->p;
10300 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10301 int inexact, sg;
10302 mpfr_t m;
10303 tree result_lg;
10305 mpfr_init2 (m, prec);
10306 mpfr_from_real (m, ra, GMP_RNDN);
10307 mpfr_clear_flags ();
10308 inexact = mpfr_lgamma (m, &sg, m, rnd);
10309 result_lg = do_mpfr_ckconv (m, type, inexact);
10310 mpfr_clear (m);
10311 if (result_lg)
10313 tree result_sg;
10315 /* Dereference the arg_sg pointer argument. */
10316 arg_sg = build_fold_indirect_ref (arg_sg);
10317 /* Assign the signgam value into *arg_sg. */
10318 result_sg = fold_build2 (MODIFY_EXPR,
10319 TREE_TYPE (arg_sg), arg_sg,
10320 build_int_cst (TREE_TYPE (arg_sg), sg));
10321 TREE_SIDE_EFFECTS (result_sg) = 1;
10322 /* Combine the signgam assignment with the lgamma result. */
10323 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10324 result_sg, result_lg));
10329 return result;
10332 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10333 mpc function FUNC on it and return the resulting value as a tree
10334 with type TYPE. The mpfr precision is set to the precision of
10335 TYPE. We assume that function FUNC returns zero if the result
10336 could be calculated exactly within the requested precision. If
10337 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10338 in the arguments and/or results. */
10340 tree
10341 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10342 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10344 tree result = NULL_TREE;
10346 STRIP_NOPS (arg0);
10347 STRIP_NOPS (arg1);
10349 /* To proceed, MPFR must exactly represent the target floating point
10350 format, which only happens when the target base equals two. */
10351 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10353 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10355 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10357 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10358 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10359 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10360 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10362 if (do_nonfinite
10363 || (real_isfinite (re0) && real_isfinite (im0)
10364 && real_isfinite (re1) && real_isfinite (im1)))
10366 const struct real_format *const fmt =
10367 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10368 const int prec = fmt->p;
10369 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10370 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10371 int inexact;
10372 mpc_t m0, m1;
10374 mpc_init2 (m0, prec);
10375 mpc_init2 (m1, prec);
10376 mpfr_from_real (mpc_realref (m0), re0, rnd);
10377 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10378 mpfr_from_real (mpc_realref (m1), re1, rnd);
10379 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10380 mpfr_clear_flags ();
10381 inexact = func (m0, m0, m1, crnd);
10382 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10383 mpc_clear (m0);
10384 mpc_clear (m1);
10388 return result;
10391 /* A wrapper function for builtin folding that prevents warnings for
10392 "statement without effect" and the like, caused by removing the
10393 call node earlier than the warning is generated. */
10395 tree
10396 fold_call_stmt (gcall *stmt, bool ignore)
10398 tree ret = NULL_TREE;
10399 tree fndecl = gimple_call_fndecl (stmt);
10400 location_t loc = gimple_location (stmt);
10401 if (fndecl
10402 && TREE_CODE (fndecl) == FUNCTION_DECL
10403 && DECL_BUILT_IN (fndecl)
10404 && !gimple_call_va_arg_pack_p (stmt))
10406 int nargs = gimple_call_num_args (stmt);
10407 tree *args = (nargs > 0
10408 ? gimple_call_arg_ptr (stmt, 0)
10409 : &error_mark_node);
10411 if (avoid_folding_inline_builtin (fndecl))
10412 return NULL_TREE;
10413 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10415 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10417 else
10419 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10420 if (ret)
10422 /* Propagate location information from original call to
10423 expansion of builtin. Otherwise things like
10424 maybe_emit_chk_warning, that operate on the expansion
10425 of a builtin, will use the wrong location information. */
10426 if (gimple_has_location (stmt))
10428 tree realret = ret;
10429 if (TREE_CODE (ret) == NOP_EXPR)
10430 realret = TREE_OPERAND (ret, 0);
10431 if (CAN_HAVE_LOCATION_P (realret)
10432 && !EXPR_HAS_LOCATION (realret))
10433 SET_EXPR_LOCATION (realret, loc);
10434 return realret;
10436 return ret;
10440 return NULL_TREE;
10443 /* Look up the function in builtin_decl that corresponds to DECL
10444 and set ASMSPEC as its user assembler name. DECL must be a
10445 function decl that declares a builtin. */
10447 void
10448 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10450 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10451 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10452 && asmspec != 0);
10454 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10455 set_user_assembler_name (builtin, asmspec);
10457 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10458 && INT_TYPE_SIZE < BITS_PER_WORD)
10460 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10461 set_user_assembler_libfunc ("ffs", asmspec);
10462 set_optab_libfunc (ffs_optab, mode, "ffs");
10466 /* Return true if DECL is a builtin that expands to a constant or similarly
10467 simple code. */
10468 bool
10469 is_simple_builtin (tree decl)
10471 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10472 switch (DECL_FUNCTION_CODE (decl))
10474 /* Builtins that expand to constants. */
10475 case BUILT_IN_CONSTANT_P:
10476 case BUILT_IN_EXPECT:
10477 case BUILT_IN_OBJECT_SIZE:
10478 case BUILT_IN_UNREACHABLE:
10479 /* Simple register moves or loads from stack. */
10480 case BUILT_IN_ASSUME_ALIGNED:
10481 case BUILT_IN_RETURN_ADDRESS:
10482 case BUILT_IN_EXTRACT_RETURN_ADDR:
10483 case BUILT_IN_FROB_RETURN_ADDR:
10484 case BUILT_IN_RETURN:
10485 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10486 case BUILT_IN_FRAME_ADDRESS:
10487 case BUILT_IN_VA_END:
10488 case BUILT_IN_STACK_SAVE:
10489 case BUILT_IN_STACK_RESTORE:
10490 /* Exception state returns or moves registers around. */
10491 case BUILT_IN_EH_FILTER:
10492 case BUILT_IN_EH_POINTER:
10493 case BUILT_IN_EH_COPY_VALUES:
10494 return true;
10496 default:
10497 return false;
10500 return false;
10503 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10504 will most probably be expanded inline into reasonably simple code. This is a
10505 superset of is_simple_builtin. */
10506 bool
10507 is_inexpensive_builtin (tree decl)
10509 if (!decl)
10510 return false;
10511 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10512 return true;
10513 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10514 switch (DECL_FUNCTION_CODE (decl))
10516 case BUILT_IN_ABS:
10517 CASE_BUILT_IN_ALLOCA:
10518 case BUILT_IN_BSWAP16:
10519 case BUILT_IN_BSWAP32:
10520 case BUILT_IN_BSWAP64:
10521 case BUILT_IN_CLZ:
10522 case BUILT_IN_CLZIMAX:
10523 case BUILT_IN_CLZL:
10524 case BUILT_IN_CLZLL:
10525 case BUILT_IN_CTZ:
10526 case BUILT_IN_CTZIMAX:
10527 case BUILT_IN_CTZL:
10528 case BUILT_IN_CTZLL:
10529 case BUILT_IN_FFS:
10530 case BUILT_IN_FFSIMAX:
10531 case BUILT_IN_FFSL:
10532 case BUILT_IN_FFSLL:
10533 case BUILT_IN_IMAXABS:
10534 case BUILT_IN_FINITE:
10535 case BUILT_IN_FINITEF:
10536 case BUILT_IN_FINITEL:
10537 case BUILT_IN_FINITED32:
10538 case BUILT_IN_FINITED64:
10539 case BUILT_IN_FINITED128:
10540 case BUILT_IN_FPCLASSIFY:
10541 case BUILT_IN_ISFINITE:
10542 case BUILT_IN_ISINF_SIGN:
10543 case BUILT_IN_ISINF:
10544 case BUILT_IN_ISINFF:
10545 case BUILT_IN_ISINFL:
10546 case BUILT_IN_ISINFD32:
10547 case BUILT_IN_ISINFD64:
10548 case BUILT_IN_ISINFD128:
10549 case BUILT_IN_ISNAN:
10550 case BUILT_IN_ISNANF:
10551 case BUILT_IN_ISNANL:
10552 case BUILT_IN_ISNAND32:
10553 case BUILT_IN_ISNAND64:
10554 case BUILT_IN_ISNAND128:
10555 case BUILT_IN_ISNORMAL:
10556 case BUILT_IN_ISGREATER:
10557 case BUILT_IN_ISGREATEREQUAL:
10558 case BUILT_IN_ISLESS:
10559 case BUILT_IN_ISLESSEQUAL:
10560 case BUILT_IN_ISLESSGREATER:
10561 case BUILT_IN_ISUNORDERED:
10562 case BUILT_IN_VA_ARG_PACK:
10563 case BUILT_IN_VA_ARG_PACK_LEN:
10564 case BUILT_IN_VA_COPY:
10565 case BUILT_IN_TRAP:
10566 case BUILT_IN_SAVEREGS:
10567 case BUILT_IN_POPCOUNTL:
10568 case BUILT_IN_POPCOUNTLL:
10569 case BUILT_IN_POPCOUNTIMAX:
10570 case BUILT_IN_POPCOUNT:
10571 case BUILT_IN_PARITYL:
10572 case BUILT_IN_PARITYLL:
10573 case BUILT_IN_PARITYIMAX:
10574 case BUILT_IN_PARITY:
10575 case BUILT_IN_LABS:
10576 case BUILT_IN_LLABS:
10577 case BUILT_IN_PREFETCH:
10578 case BUILT_IN_ACC_ON_DEVICE:
10579 return true;
10581 default:
10582 return is_simple_builtin (decl);
10585 return false;
10588 /* Return true if T is a constant and the value cast to a target char
10589 can be represented by a host char.
10590 Store the converted char constant in *P if so. */
10592 bool
10593 target_char_cst_p (tree t, char *p)
10595 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10596 return false;
10598 *p = (char)tree_to_uhwi (t);
10599 return true;
10602 /* Return the maximum object size. */
10604 tree
10605 max_object_size (void)
10607 /* To do: Make this a configurable parameter. */
10608 return TYPE_MAX_VALUE (ptrdiff_type_node);