PR tree-optimization/85259 - Missing -Wstringop-overflow= since r256683
gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
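
/* Illustrative note (not in the original source): given the prefix checks
   above, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") both return true, while a plain
   "memcpy" returns false.  */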

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
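
/* Worked example (illustrative, not in the original source): for an
   access to x.f, where x is an 8-byte aligned structure and f sits at
   byte offset 5, get_inner_reference yields bitpos == 40 and
   DECL_ALIGN (x) gives align == 64, so the caller receives
   M == 64 bits and N == (40 & 63) == 40 bits: the address is 5 bytes
   past a multiple of 8.  */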

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
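
/* Illustrative example (not in the original source): if P is known to be
   8-byte aligned, get_pointer_alignment (p) returns 64 (bits); for the
   POINTER_PLUS_EXPR p + 2 the recursion above yields bitpos == 16, so
   least_bit_hwi (16) == 16 and only 2-byte alignment is reported.  */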

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
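
/* Illustrative example (not in the original source): for the byte sequence
   "ab\0cd", string_length (ptr, 1, 5) stops at the embedded null and
   returns 2; for a 2-byte wide-character encoding of L"a" the eltsize == 2
   loop above compares two bytes at a time and returns 1.  */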

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
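
/* Illustrative example (not in the original source): for the literal
   "hello", c_strlen returns ssize_int (5); with a known byte offset of 2
   it returns 3.  For "foo\0bar" behind a non-constant offset, the embedded
   null makes the length unknowable here, so NULL_TREE is returned by the
   early-out above.  */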

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
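
/* Illustrative example (not in the original source): assuming a
   little-endian target where neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN
   is set, c_readstr ("abcd", SImode) maps str[i] to bit position
   i * BITS_PER_UNIT and therefore yields the constant 0x64636261.  */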

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
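
/* Illustrative usage (not in the original source): this expands calls
   such as

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   where any nonzero count forces use of the hard frame pointer, per the
   comment above.  */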

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
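
/* Summary of the buffer layout established above (descriptive note, not in
   the original source): word 0 holds the frame pointer value, word 1 the
   address of RECEIVER_LABEL, and the bytes from offset
   2 * GET_MODE_SIZE (Pmode) onward hold the machine-dependent stack save
   area written by emit_stack_save.  */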

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if more arguments remain in the CALL_EXPR iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
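
/* Illustrative usage (not in the original source): a memcpy-style builtin
   taking two pointers and a size would be checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
		       INTEGER_TYPE, VOID_TYPE);

   where the trailing VOID_TYPE marks the end of the expected argument
   list.  */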

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
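
/* Illustrative usage (not in the original source): a call such as

     __builtin_prefetch (&a[i + 8], 0, 3);

   requests a read prefetch (second argument 0) with maximal temporal
   locality (third argument 3); both constants are validated and
   range-checked above.  */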

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which are stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1737 /* Perform an untyped return. */
1739 static void
1740 expand_builtin_return (rtx result)
1742 int size, align, regno;
1743 fixed_size_mode mode;
1744 rtx reg;
1745 rtx_insn *call_fusage = 0;
1747 result = convert_memory_address (Pmode, result);
1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1752 if (targetm.have_untyped_return ())
1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1756 emit_barrier ();
1757 return;
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1769 emit_move_insn (reg, adjust_address (result, mode, size));
1771 push_to_sequence (call_fusage);
1772 emit_use (reg);
1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1778 /* Put the USE insns before the return. */
1779 emit_insn (call_fusage);
1781 /* Return whatever values were restored by jumping directly to the end
1782 of the function. */
1783 expand_naked_return ();
1786 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1788 static enum type_class
1789 type_to_class (tree type)
1791 switch (TREE_CODE (type))
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1814 /* Expand a call EXP to __builtin_classify_type. */
1816 static rtx
1817 expand_builtin_classify_type (tree exp)
1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1821 return GEN_INT (no_type_class);
1824 /* This helper macro, meant to be used in mathfn_built_in below, determines
1825 which among a set of builtin math functions is appropriate for a given type
1826 mode. The `F' (float) and `L' (long double) are automatically generated
1827 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1828 types, there are additional types that are considered with 'F32', 'F64',
1829 'F128', etc. suffixes. */
1830 #define CASE_MATHFN(MATHFN) \
1831 CASE_CFN_##MATHFN: \
1832 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1833 fcodel = BUILT_IN_##MATHFN##L ; break;
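/* For illustration, CASE_MATHFN (SQRT) above expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where the CASE_CFN_SQRT label itself covers both CFN_SQRT and
   CFN_BUILT_IN_SQRT (a sketch; see case-cfn-macros.h for the exact
   generated labels).  */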
1834 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1835 types. */
1836 #define CASE_MATHFN_FLOATN(MATHFN) \
1837 CASE_CFN_##MATHFN: \
1838 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1839 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1840 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1841 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1842 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1843 break;
1844 /* Similar to above, but appends _R after any F/L suffix. */
1845 #define CASE_MATHFN_REENT(MATHFN) \
1846 case CFN_BUILT_IN_##MATHFN##_R: \
1847 case CFN_BUILT_IN_##MATHFN##F_R: \
1848 case CFN_BUILT_IN_##MATHFN##L_R: \
1849 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1850 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1852 /* Return a function equivalent to FN but operating on floating-point
1853 values of type TYPE, or END_BUILTINS if no such function exists.
1854 This is purely an operation on function codes; it does not guarantee
1855 that the target actually has an implementation of the function. */
1857 static built_in_function
1858 mathfn_built_in_2 (tree type, combined_fn fn)
1860 tree mtype;
1861 built_in_function fcode, fcodef, fcodel;
1862 built_in_function fcodef16 = END_BUILTINS;
1863 built_in_function fcodef32 = END_BUILTINS;
1864 built_in_function fcodef64 = END_BUILTINS;
1865 built_in_function fcodef128 = END_BUILTINS;
1866 built_in_function fcodef32x = END_BUILTINS;
1867 built_in_function fcodef64x = END_BUILTINS;
1868 built_in_function fcodef128x = END_BUILTINS;
1870 switch (fn)
1872 CASE_MATHFN (ACOS)
1873 CASE_MATHFN (ACOSH)
1874 CASE_MATHFN (ASIN)
1875 CASE_MATHFN (ASINH)
1876 CASE_MATHFN (ATAN)
1877 CASE_MATHFN (ATAN2)
1878 CASE_MATHFN (ATANH)
1879 CASE_MATHFN (CBRT)
1880 CASE_MATHFN_FLOATN (CEIL)
1881 CASE_MATHFN (CEXPI)
1882 CASE_MATHFN_FLOATN (COPYSIGN)
1883 CASE_MATHFN (COS)
1884 CASE_MATHFN (COSH)
1885 CASE_MATHFN (DREM)
1886 CASE_MATHFN (ERF)
1887 CASE_MATHFN (ERFC)
1888 CASE_MATHFN (EXP)
1889 CASE_MATHFN (EXP10)
1890 CASE_MATHFN (EXP2)
1891 CASE_MATHFN (EXPM1)
1892 CASE_MATHFN (FABS)
1893 CASE_MATHFN (FDIM)
1894 CASE_MATHFN_FLOATN (FLOOR)
1895 CASE_MATHFN_FLOATN (FMA)
1896 CASE_MATHFN_FLOATN (FMAX)
1897 CASE_MATHFN_FLOATN (FMIN)
1898 CASE_MATHFN (FMOD)
1899 CASE_MATHFN (FREXP)
1900 CASE_MATHFN (GAMMA)
1901 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1902 CASE_MATHFN (HUGE_VAL)
1903 CASE_MATHFN (HYPOT)
1904 CASE_MATHFN (ILOGB)
1905 CASE_MATHFN (ICEIL)
1906 CASE_MATHFN (IFLOOR)
1907 CASE_MATHFN (INF)
1908 CASE_MATHFN (IRINT)
1909 CASE_MATHFN (IROUND)
1910 CASE_MATHFN (ISINF)
1911 CASE_MATHFN (J0)
1912 CASE_MATHFN (J1)
1913 CASE_MATHFN (JN)
1914 CASE_MATHFN (LCEIL)
1915 CASE_MATHFN (LDEXP)
1916 CASE_MATHFN (LFLOOR)
1917 CASE_MATHFN (LGAMMA)
1918 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1919 CASE_MATHFN (LLCEIL)
1920 CASE_MATHFN (LLFLOOR)
1921 CASE_MATHFN (LLRINT)
1922 CASE_MATHFN (LLROUND)
1923 CASE_MATHFN (LOG)
1924 CASE_MATHFN (LOG10)
1925 CASE_MATHFN (LOG1P)
1926 CASE_MATHFN (LOG2)
1927 CASE_MATHFN (LOGB)
1928 CASE_MATHFN (LRINT)
1929 CASE_MATHFN (LROUND)
1930 CASE_MATHFN (MODF)
1931 CASE_MATHFN (NAN)
1932 CASE_MATHFN (NANS)
1933 CASE_MATHFN_FLOATN (NEARBYINT)
1934 CASE_MATHFN (NEXTAFTER)
1935 CASE_MATHFN (NEXTTOWARD)
1936 CASE_MATHFN (POW)
1937 CASE_MATHFN (POWI)
1938 CASE_MATHFN (POW10)
1939 CASE_MATHFN (REMAINDER)
1940 CASE_MATHFN (REMQUO)
1941 CASE_MATHFN_FLOATN (RINT)
1942 CASE_MATHFN_FLOATN (ROUND)
1943 CASE_MATHFN (SCALB)
1944 CASE_MATHFN (SCALBLN)
1945 CASE_MATHFN (SCALBN)
1946 CASE_MATHFN (SIGNBIT)
1947 CASE_MATHFN (SIGNIFICAND)
1948 CASE_MATHFN (SIN)
1949 CASE_MATHFN (SINCOS)
1950 CASE_MATHFN (SINH)
1951 CASE_MATHFN_FLOATN (SQRT)
1952 CASE_MATHFN (TAN)
1953 CASE_MATHFN (TANH)
1954 CASE_MATHFN (TGAMMA)
1955 CASE_MATHFN_FLOATN (TRUNC)
1956 CASE_MATHFN (Y0)
1957 CASE_MATHFN (Y1)
1958 CASE_MATHFN (YN)
1960 default:
1961 return END_BUILTINS;
1964 mtype = TYPE_MAIN_VARIANT (type);
1965 if (mtype == double_type_node)
1966 return fcode;
1967 else if (mtype == float_type_node)
1968 return fcodef;
1969 else if (mtype == long_double_type_node)
1970 return fcodel;
1971 else if (mtype == float16_type_node)
1972 return fcodef16;
1973 else if (mtype == float32_type_node)
1974 return fcodef32;
1975 else if (mtype == float64_type_node)
1976 return fcodef64;
1977 else if (mtype == float128_type_node)
1978 return fcodef128;
1979 else if (mtype == float32x_type_node)
1980 return fcodef32x;
1981 else if (mtype == float64x_type_node)
1982 return fcodef64x;
1983 else if (mtype == float128x_type_node)
1984 return fcodef128x;
1985 else
1986 return END_BUILTINS;
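/* For illustration: mathfn_built_in_2 (float_type_node, CFN_SQRT) yields
   BUILT_IN_SQRTF and the long double variant yields BUILT_IN_SQRTL, while
   a type whose main variant is none of the recognized floating-point
   nodes yields END_BUILTINS.  */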
1989 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1990 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1991 otherwise use the explicit declaration. If we can't do the conversion,
1992 return null. */
1994 static tree
1995 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1997 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1998 if (fcode2 == END_BUILTINS)
1999 return NULL_TREE;
2001 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2002 return NULL_TREE;
2004 return builtin_decl_explicit (fcode2);
2007 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2009 tree
2010 mathfn_built_in (tree type, combined_fn fn)
2012 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2015 /* Like mathfn_built_in_1, but take a built_in_function and
2016 always use the implicit builtin declarations. */
2018 tree
2019 mathfn_built_in (tree type, enum built_in_function fn)
2021 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
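/* A typical use, sketched (ARG is illustrative), as in the lfloor/lceil
   expander further below:

     tree fndecl = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_FLOOR);
     if (fndecl == NULL_TREE)
       ... no implicitly-available decl for this type; emit a libcall ...
*/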
2024 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2025 return its code, otherwise return IFN_LAST. Note that this function
2026 only tests whether the function is defined in internals.def, not whether
2027 it is actually available on the target. */
2029 internal_fn
2030 associated_internal_fn (tree fndecl)
2032 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2033 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2034 switch (DECL_FUNCTION_CODE (fndecl))
2036 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2037 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2038 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2039 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2040 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2041 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2042 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2043 #include "internal-fn.def"
2045 CASE_FLT_FN (BUILT_IN_POW10):
2046 return IFN_EXP10;
2048 CASE_FLT_FN (BUILT_IN_DREM):
2049 return IFN_REMAINDER;
2051 CASE_FLT_FN (BUILT_IN_SCALBN):
2052 CASE_FLT_FN (BUILT_IN_SCALBLN):
2053 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2054 return IFN_LDEXP;
2055 return IFN_LAST;
2057 default:
2058 return IFN_LAST;
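/* For example, a decl with DECL_FUNCTION_CODE BUILT_IN_SQRTF maps to
   IFN_SQRT, while BUILT_IN_SCALBNF maps to IFN_LDEXP only when the
   return type's floating-point format has radix 2, and to IFN_LAST
   otherwise.  */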
2062 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2063 on the current target by a call to an internal function, return the
2064 code of that internal function, otherwise return IFN_LAST. The caller
2065 is responsible for ensuring that any side-effects of the built-in
2066 call are dealt with correctly. E.g. if CALL sets errno, the caller
2067 must decide that the errno result isn't needed or make it available
2068 in some other way. */
2070 internal_fn
2071 replacement_internal_fn (gcall *call)
2073 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2075 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2076 if (ifn != IFN_LAST)
2078 tree_pair types = direct_internal_fn_types (ifn, call);
2079 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2080 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2081 return ifn;
2084 return IFN_LAST;
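/* Sketch of a typical caller (assuming a gimple statement STMT):

     if (gcall *call = dyn_cast <gcall *> (stmt))
       {
	 internal_fn ifn = replacement_internal_fn (call);
	 if (ifn != IFN_LAST)
	   ... rewrite the call to use IFN instead of the builtin ...
       }
*/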
2087 /* Expand a call to the builtin ternary math functions (fma).
2088 Return NULL_RTX if a normal call should be emitted rather than expanding the
2089 function in-line. EXP is the expression that is a call to the builtin
2090 function; if convenient, the result should be placed in TARGET.
2091 SUBTARGET may be used as the target for computing one of EXP's
2092 operands. */
2094 static rtx
2095 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2097 optab builtin_optab;
2098 rtx op0, op1, op2, result;
2099 rtx_insn *insns;
2100 tree fndecl = get_callee_fndecl (exp);
2101 tree arg0, arg1, arg2;
2102 machine_mode mode;
2104 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2105 return NULL_RTX;
2107 arg0 = CALL_EXPR_ARG (exp, 0);
2108 arg1 = CALL_EXPR_ARG (exp, 1);
2109 arg2 = CALL_EXPR_ARG (exp, 2);
2111 switch (DECL_FUNCTION_CODE (fndecl))
2113 CASE_FLT_FN (BUILT_IN_FMA):
2114 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2115 builtin_optab = fma_optab; break;
2116 default:
2117 gcc_unreachable ();
2120 /* Make a suitable register to place result in. */
2121 mode = TYPE_MODE (TREE_TYPE (exp));
2123 /* Before working hard, check whether the instruction is available. */
2124 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2125 return NULL_RTX;
2127 result = gen_reg_rtx (mode);
2129 /* Always stabilize the argument list. */
2130 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2131 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2132 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2134 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2135 op1 = expand_normal (arg1);
2136 op2 = expand_normal (arg2);
2138 start_sequence ();
2140 /* Compute into RESULT.
2141 Set RESULT to wherever the result comes back. */
2142 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2143 result, 0);
2145 /* If we were unable to expand via the builtin, stop the sequence
2146 (without outputting the insns) and call to the library function
2147 with the stabilized argument list. */
2148 if (result == 0)
2150 end_sequence ();
2151 return expand_call (exp, target, target == const0_rtx);
2154 /* Output the entire sequence. */
2155 insns = get_insns ();
2156 end_sequence ();
2157 emit_insn (insns);
2159 return result;
2162 /* Expand a call to the builtin sin and cos math functions.
2163 Return NULL_RTX if a normal call should be emitted rather than expanding the
2164 function in-line. EXP is the expression that is a call to the builtin
2165 function; if convenient, the result should be placed in TARGET.
2166 SUBTARGET may be used as the target for computing one of EXP's
2167 operands. */
2169 static rtx
2170 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2172 optab builtin_optab;
2173 rtx op0;
2174 rtx_insn *insns;
2175 tree fndecl = get_callee_fndecl (exp);
2176 machine_mode mode;
2177 tree arg;
2179 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2180 return NULL_RTX;
2182 arg = CALL_EXPR_ARG (exp, 0);
2184 switch (DECL_FUNCTION_CODE (fndecl))
2186 CASE_FLT_FN (BUILT_IN_SIN):
2187 CASE_FLT_FN (BUILT_IN_COS):
2188 builtin_optab = sincos_optab; break;
2189 default:
2190 gcc_unreachable ();
2193 /* Make a suitable register to place result in. */
2194 mode = TYPE_MODE (TREE_TYPE (exp));
2196 /* Check if the sincos insn is available; otherwise fall back
2197 to the sin or cos insn. */
2198 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2199 switch (DECL_FUNCTION_CODE (fndecl))
2201 CASE_FLT_FN (BUILT_IN_SIN):
2202 builtin_optab = sin_optab; break;
2203 CASE_FLT_FN (BUILT_IN_COS):
2204 builtin_optab = cos_optab; break;
2205 default:
2206 gcc_unreachable ();
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2212 rtx result = gen_reg_rtx (mode);
2214 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2215 need to expand the argument again. This way, we will not perform
2216 side effects more than once. */
2217 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2219 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2221 start_sequence ();
2223 /* Compute into RESULT.
2224 Set RESULT to wherever the result comes back. */
2225 if (builtin_optab == sincos_optab)
2227 int ok;
2229 switch (DECL_FUNCTION_CODE (fndecl))
2231 CASE_FLT_FN (BUILT_IN_SIN):
2232 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2233 break;
2234 CASE_FLT_FN (BUILT_IN_COS):
2235 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2236 break;
2237 default:
2238 gcc_unreachable ();
2240 gcc_assert (ok);
2242 else
2243 result = expand_unop (mode, builtin_optab, op0, result, 0);
2245 if (result != 0)
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2251 return result;
2254 /* If we were unable to expand via the builtin, stop the sequence
2255 (without outputting the insns) and call to the library function
2256 with the stabilized argument list. */
2257 end_sequence ();
2260 return expand_call (exp, target, target == const0_rtx);
2263 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2264 return an RTL instruction code that implements the functionality.
2265 If that isn't possible or available return CODE_FOR_nothing. */
2267 static enum insn_code
2268 interclass_mathfn_icode (tree arg, tree fndecl)
2270 bool errno_set = false;
2271 optab builtin_optab = unknown_optab;
2272 machine_mode mode;
2274 switch (DECL_FUNCTION_CODE (fndecl))
2276 CASE_FLT_FN (BUILT_IN_ILOGB):
2277 errno_set = true; builtin_optab = ilogb_optab; break;
2278 CASE_FLT_FN (BUILT_IN_ISINF):
2279 builtin_optab = isinf_optab; break;
2280 case BUILT_IN_ISNORMAL:
2281 case BUILT_IN_ISFINITE:
2282 CASE_FLT_FN (BUILT_IN_FINITE):
2283 case BUILT_IN_FINITED32:
2284 case BUILT_IN_FINITED64:
2285 case BUILT_IN_FINITED128:
2286 case BUILT_IN_ISINFD32:
2287 case BUILT_IN_ISINFD64:
2288 case BUILT_IN_ISINFD128:
2289 /* These builtins have no optabs (yet). */
2290 break;
2291 default:
2292 gcc_unreachable ();
2295 /* There's no easy way to detect the case we need to set EDOM. */
2296 if (flag_errno_math && errno_set)
2297 return CODE_FOR_nothing;
2299 /* Optab mode depends on the mode of the input argument. */
2300 mode = TYPE_MODE (TREE_TYPE (arg));
2302 if (builtin_optab)
2303 return optab_handler (builtin_optab, mode);
2304 return CODE_FOR_nothing;
2307 /* Expand a call to one of the builtin math functions that operate on
2308 a floating-point argument and produce an integer result (ilogb, isinf,
2309 isnan, etc.).
2310 Return 0 if a normal call should be emitted rather than expanding the
2311 function in-line. EXP is the expression that is a call to the builtin
2312 function; if convenient, the result should be placed in TARGET. */
2314 static rtx
2315 expand_builtin_interclass_mathfn (tree exp, rtx target)
2317 enum insn_code icode = CODE_FOR_nothing;
2318 rtx op0;
2319 tree fndecl = get_callee_fndecl (exp);
2320 machine_mode mode;
2321 tree arg;
2323 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2324 return NULL_RTX;
2326 arg = CALL_EXPR_ARG (exp, 0);
2327 icode = interclass_mathfn_icode (arg, fndecl);
2328 mode = TYPE_MODE (TREE_TYPE (arg));
2330 if (icode != CODE_FOR_nothing)
2332 struct expand_operand ops[1];
2333 rtx_insn *last = get_last_insn ();
2334 tree orig_arg = arg;
2336 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2337 need to expand the argument again. This way, we will not perform
2338 side effects more than once. */
2339 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2341 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2343 if (mode != GET_MODE (op0))
2344 op0 = convert_to_mode (mode, op0, 0);
2346 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2347 if (maybe_legitimize_operands (icode, 0, 1, ops)
2348 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2349 return ops[0].value;
2351 delete_insns_since (last);
2352 CALL_EXPR_ARG (exp, 0) = orig_arg;
2355 return NULL_RTX;
2358 /* Expand a call to the builtin sincos math function.
2359 Return NULL_RTX if a normal call should be emitted rather than expanding the
2360 function in-line. EXP is the expression that is a call to the builtin
2361 function. */
2363 static rtx
2364 expand_builtin_sincos (tree exp)
2366 rtx op0, op1, op2, target1, target2;
2367 machine_mode mode;
2368 tree arg, sinp, cosp;
2369 int result;
2370 location_t loc = EXPR_LOCATION (exp);
2371 tree alias_type, alias_off;
2373 if (!validate_arglist (exp, REAL_TYPE,
2374 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2375 return NULL_RTX;
2377 arg = CALL_EXPR_ARG (exp, 0);
2378 sinp = CALL_EXPR_ARG (exp, 1);
2379 cosp = CALL_EXPR_ARG (exp, 2);
2381 /* Make a suitable register to place result in. */
2382 mode = TYPE_MODE (TREE_TYPE (arg));
2384 /* Check if sincos insn is available, otherwise emit the call. */
2385 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2386 return NULL_RTX;
2388 target1 = gen_reg_rtx (mode);
2389 target2 = gen_reg_rtx (mode);
2391 op0 = expand_normal (arg);
2392 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2393 alias_off = build_int_cst (alias_type, 0);
2394 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2395 sinp, alias_off));
2396 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2397 cosp, alias_off));
2399 /* Compute into target1 and target2.
2400 Set TARGET to wherever the result comes back. */
2401 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2402 gcc_assert (result);
2404 /* Move target1 and target2 to the memory locations indicated
2405 by op1 and op2. */
2406 emit_move_insn (op1, target1);
2407 emit_move_insn (op2, target2);
2409 return const0_rtx;
2412 /* Expand a call to the internal cexpi builtin to the sincos math function.
2413 EXP is the expression that is a call to the builtin function; if convenient,
2414 the result should be placed in TARGET. */
2416 static rtx
2417 expand_builtin_cexpi (tree exp, rtx target)
2419 tree fndecl = get_callee_fndecl (exp);
2420 tree arg, type;
2421 machine_mode mode;
2422 rtx op0, op1, op2;
2423 location_t loc = EXPR_LOCATION (exp);
2425 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2426 return NULL_RTX;
2428 arg = CALL_EXPR_ARG (exp, 0);
2429 type = TREE_TYPE (arg);
2430 mode = TYPE_MODE (TREE_TYPE (arg));
2432 /* Try expanding via a sincos optab, and fall back to emitting a libcall
2433 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2434 is only generated when either of them is available. */
2435 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2437 op1 = gen_reg_rtx (mode);
2438 op2 = gen_reg_rtx (mode);
2440 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2442 /* Compute into op1 and op2. */
2443 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2445 else if (targetm.libc_has_function (function_sincos))
2447 tree call, fn = NULL_TREE;
2448 tree top1, top2;
2449 rtx op1a, op2a;
2451 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2452 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2454 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2455 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2456 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2457 else
2458 gcc_unreachable ();
2460 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2461 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2462 op1a = copy_addr_to_reg (XEXP (op1, 0));
2463 op2a = copy_addr_to_reg (XEXP (op2, 0));
2464 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2465 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2467 /* Make sure not to fold the sincos call again. */
2468 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2469 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2470 call, 3, arg, top1, top2));
2472 else
2474 tree call, fn = NULL_TREE, narg;
2475 tree ctype = build_complex_type (type);
2477 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2478 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2479 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2480 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2481 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2482 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2483 else
2484 gcc_unreachable ();
2486 /* If we don't have a decl for cexp create one. This is the
2487 friendliest fallback if the user calls __builtin_cexpi
2488 on a target without full C99 function support. */
2489 if (fn == NULL_TREE)
2491 tree fntype;
2492 const char *name = NULL;
2494 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2495 name = "cexpf";
2496 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2497 name = "cexp";
2498 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2499 name = "cexpl";
2501 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2502 fn = build_fn_decl (name, fntype);
2505 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2506 build_real (type, dconst0), arg);
2508 /* Make sure not to fold the cexp call again. */
2509 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2510 return expand_expr (build_call_nary (ctype, call, 1, narg),
2511 target, VOIDmode, EXPAND_NORMAL);
2514 /* Now build the proper return type. */
2515 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2516 make_tree (TREE_TYPE (arg), op2),
2517 make_tree (TREE_TYPE (arg), op1)),
2518 target, VOIDmode, EXPAND_NORMAL);
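/* To restate the three strategies above for, e.g., __builtin_cexpif (x):
   with a sincos optab, sin and cos are computed directly into registers;
   with a sincos libc function, sincosf (x, &s, &c) is emitted and the
   result is built as COMPLEX_EXPR <c, s>; otherwise cexpf is applied to
   the complex value (0, x).  (An informal sketch of the code above,
   not additional behavior.)  */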
2521 /* Conveniently construct a function call expression. FNDECL names the
2522 function to be called, N is the number of arguments, and the "..."
2523 parameters are the argument expressions. Unlike build_call_expr,
2524 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2526 static tree
2527 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2529 va_list ap;
2530 tree fntype = TREE_TYPE (fndecl);
2531 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2533 va_start (ap, n);
2534 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2535 va_end (ap);
2536 SET_EXPR_LOCATION (fn, loc);
2537 return fn;
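/* Illustrative use (LOC, FNDECL and ARG as in the fallback paths below):

     tree call = build_call_nofold_loc (loc, fndecl, 1, arg);
     rtx val = expand_normal (call);

   The resulting CALL_EXPR is not folded back into a builtin, so it is
   expanded as a real call.  */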
2540 /* Expand a call to one of the builtin rounding functions gcc defines
2541 as an extension (lfloor and lceil). As these are gcc extensions we
2542 do not need to worry about setting errno to EDOM.
2543 If expanding via optab fails, lower expression to (int)(floor(x)).
2544 EXP is the expression that is a call to the builtin function;
2545 if convenient, the result should be placed in TARGET. */
2547 static rtx
2548 expand_builtin_int_roundingfn (tree exp, rtx target)
2550 convert_optab builtin_optab;
2551 rtx op0, tmp;
2552 rtx_insn *insns;
2553 tree fndecl = get_callee_fndecl (exp);
2554 enum built_in_function fallback_fn;
2555 tree fallback_fndecl;
2556 machine_mode mode;
2557 tree arg;
2559 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2560 gcc_unreachable ();
2562 arg = CALL_EXPR_ARG (exp, 0);
2564 switch (DECL_FUNCTION_CODE (fndecl))
2566 CASE_FLT_FN (BUILT_IN_ICEIL):
2567 CASE_FLT_FN (BUILT_IN_LCEIL):
2568 CASE_FLT_FN (BUILT_IN_LLCEIL):
2569 builtin_optab = lceil_optab;
2570 fallback_fn = BUILT_IN_CEIL;
2571 break;
2573 CASE_FLT_FN (BUILT_IN_IFLOOR):
2574 CASE_FLT_FN (BUILT_IN_LFLOOR):
2575 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2576 builtin_optab = lfloor_optab;
2577 fallback_fn = BUILT_IN_FLOOR;
2578 break;
2580 default:
2581 gcc_unreachable ();
2584 /* Make a suitable register to place result in. */
2585 mode = TYPE_MODE (TREE_TYPE (exp));
2587 target = gen_reg_rtx (mode);
2589 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2590 need to expand the argument again. This way, we will not perform
2591 side effects more than once. */
2592 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2594 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2596 start_sequence ();
2598 /* Compute into TARGET. */
2599 if (expand_sfix_optab (target, op0, builtin_optab))
2601 /* Output the entire sequence. */
2602 insns = get_insns ();
2603 end_sequence ();
2604 emit_insn (insns);
2605 return target;
2608 /* If we were unable to expand via the builtin, stop the sequence
2609 (without outputting the insns). */
2610 end_sequence ();
2612 /* Fall back to floating point rounding optab. */
2613 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2615 /* For non-C99 targets we may end up without a fallback fndecl here
2616 if the user called __builtin_lfloor directly. In this case emit
2617 a call to the floor/ceil variants nevertheless. This should result
2618 in the best user experience on targets without full C99 support. */
2619 if (fallback_fndecl == NULL_TREE)
2621 tree fntype;
2622 const char *name = NULL;
2624 switch (DECL_FUNCTION_CODE (fndecl))
2626 case BUILT_IN_ICEIL:
2627 case BUILT_IN_LCEIL:
2628 case BUILT_IN_LLCEIL:
2629 name = "ceil";
2630 break;
2631 case BUILT_IN_ICEILF:
2632 case BUILT_IN_LCEILF:
2633 case BUILT_IN_LLCEILF:
2634 name = "ceilf";
2635 break;
2636 case BUILT_IN_ICEILL:
2637 case BUILT_IN_LCEILL:
2638 case BUILT_IN_LLCEILL:
2639 name = "ceill";
2640 break;
2641 case BUILT_IN_IFLOOR:
2642 case BUILT_IN_LFLOOR:
2643 case BUILT_IN_LLFLOOR:
2644 name = "floor";
2645 break;
2646 case BUILT_IN_IFLOORF:
2647 case BUILT_IN_LFLOORF:
2648 case BUILT_IN_LLFLOORF:
2649 name = "floorf";
2650 break;
2651 case BUILT_IN_IFLOORL:
2652 case BUILT_IN_LFLOORL:
2653 case BUILT_IN_LLFLOORL:
2654 name = "floorl";
2655 break;
2656 default:
2657 gcc_unreachable ();
2660 fntype = build_function_type_list (TREE_TYPE (arg),
2661 TREE_TYPE (arg), NULL_TREE);
2662 fallback_fndecl = build_fn_decl (name, fntype);
2665 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2667 tmp = expand_normal (exp);
2668 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2670 /* Truncate the result of floating point optab to integer
2671 via expand_fix (). */
2672 target = gen_reg_rtx (mode);
2673 expand_fix (target, tmp, 0);
2675 return target;
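/* In effect, when no lceil/lfloor pattern is available, a call such as
   __builtin_lfloor (x) is expanded as if the user had written

     (long) floor (x);

   i.e. a libcall to floor followed by expand_fix, as implemented above.  */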
2678 /* Expand a call to one of the builtin math functions doing integer
2679 conversion (lrint).
2680 Return 0 if a normal call should be emitted rather than expanding the
2681 function in-line. EXP is the expression that is a call to the builtin
2682 function; if convenient, the result should be placed in TARGET. */
2684 static rtx
2685 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2687 convert_optab builtin_optab;
2688 rtx op0;
2689 rtx_insn *insns;
2690 tree fndecl = get_callee_fndecl (exp);
2691 tree arg;
2692 machine_mode mode;
2693 enum built_in_function fallback_fn = BUILT_IN_NONE;
2695 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2696 gcc_unreachable ();
2698 arg = CALL_EXPR_ARG (exp, 0);
2700 switch (DECL_FUNCTION_CODE (fndecl))
2702 CASE_FLT_FN (BUILT_IN_IRINT):
2703 fallback_fn = BUILT_IN_LRINT;
2704 gcc_fallthrough ();
2705 CASE_FLT_FN (BUILT_IN_LRINT):
2706 CASE_FLT_FN (BUILT_IN_LLRINT):
2707 builtin_optab = lrint_optab;
2708 break;
2710 CASE_FLT_FN (BUILT_IN_IROUND):
2711 fallback_fn = BUILT_IN_LROUND;
2712 gcc_fallthrough ();
2713 CASE_FLT_FN (BUILT_IN_LROUND):
2714 CASE_FLT_FN (BUILT_IN_LLROUND):
2715 builtin_optab = lround_optab;
2716 break;
2718 default:
2719 gcc_unreachable ();
2722 /* There's no easy way to detect the case we need to set EDOM. */
2723 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2724 return NULL_RTX;
2726 /* Make a suitable register to place result in. */
2727 mode = TYPE_MODE (TREE_TYPE (exp));
2729 /* There's no easy way to detect the case we need to set EDOM. */
2730 if (!flag_errno_math)
2732 rtx result = gen_reg_rtx (mode);
2734 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2735 need to expand the argument again. This way, we will not perform
2736 side effects more than once. */
2737 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2739 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2741 start_sequence ();
2743 if (expand_sfix_optab (result, op0, builtin_optab))
2745 /* Output the entire sequence. */
2746 insns = get_insns ();
2747 end_sequence ();
2748 emit_insn (insns);
2749 return result;
2752 /* If we were unable to expand via the builtin, stop the sequence
2753 (without outputting the insns) and call to the library function
2754 with the stabilized argument list. */
2755 end_sequence ();
2758 if (fallback_fn != BUILT_IN_NONE)
2760 /* Fall back to rounding to long int. Use implicit_p 0 so that for
2761 non-C99 targets (int) round (x) is never transformed into
2762 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2763 a call to lround in the hope that the target provides at least some
2764 C99 functions. This should result in the best user experience on
2765 targets without full C99 support. */
2766 tree fallback_fndecl = mathfn_built_in_1
2767 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2769 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2770 fallback_fndecl, 1, arg);
2772 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2773 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2774 return convert_to_mode (mode, target, 0);
2777 return expand_call (exp, target, target == const0_rtx);
2780 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2781 a normal call should be emitted rather than expanding the function
2782 in-line. EXP is the expression that is a call to the builtin
2783 function; if convenient, the result should be placed in TARGET. */
2785 static rtx
2786 expand_builtin_powi (tree exp, rtx target)
2788 tree arg0, arg1;
2789 rtx op0, op1;
2790 machine_mode mode;
2791 machine_mode mode2;
2793 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2794 return NULL_RTX;
2796 arg0 = CALL_EXPR_ARG (exp, 0);
2797 arg1 = CALL_EXPR_ARG (exp, 1);
2798 mode = TYPE_MODE (TREE_TYPE (exp));
2800 /* Emit a libcall to libgcc. */
2802 /* Mode of the 2nd argument must match that of an int. */
2803 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2805 if (target == NULL_RTX)
2806 target = gen_reg_rtx (mode);
2808 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2809 if (GET_MODE (op0) != mode)
2810 op0 = convert_to_mode (mode, op0, 0);
2811 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2812 if (GET_MODE (op1) != mode2)
2813 op1 = convert_to_mode (mode2, op1, 0);
2815 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2816 target, LCT_CONST, mode,
2817 op0, mode, op1, mode2);
2819 return target;
2822 /* Expand expression EXP which is a call to the strlen builtin. Return
2823 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2824 try to get the result in TARGET, if convenient. */
2826 static rtx
2827 expand_builtin_strlen (tree exp, rtx target,
2828 machine_mode target_mode)
2830 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2831 return NULL_RTX;
2833 struct expand_operand ops[4];
2834 rtx pat;
2835 tree len;
2836 tree src = CALL_EXPR_ARG (exp, 0);
2837 rtx src_reg;
2838 rtx_insn *before_strlen;
2839 machine_mode insn_mode;
2840 enum insn_code icode = CODE_FOR_nothing;
2841 unsigned int align;
2843 /* If the length can be computed at compile-time, return it. */
2844 len = c_strlen (src, 0);
2845 if (len)
2846 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2848 /* If the length can be computed at compile-time and is a constant
2849 integer, but there are side-effects in src, evaluate
2850 src for side-effects, then return len.
2851 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2852 can be optimized into: i++; x = 3; */
2853 len = c_strlen (src, 1);
2854 if (len && TREE_CODE (len) == INTEGER_CST)
2856 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2857 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2860 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2862 /* If SRC is not a pointer type, don't do this operation inline. */
2863 if (align == 0)
2864 return NULL_RTX;
2866 /* Bail out if we can't compute strlen in the right mode. */
2867 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2869 icode = optab_handler (strlen_optab, insn_mode);
2870 if (icode != CODE_FOR_nothing)
2871 break;
2873 if (insn_mode == VOIDmode)
2874 return NULL_RTX;
2876 /* Make a place to hold the source address. We will not expand
2877 the actual source until we are sure that the expansion will
2878 not fail -- there are trees that cannot be expanded twice. */
2879 src_reg = gen_reg_rtx (Pmode);
2881 /* Mark the beginning of the strlen sequence so we can emit the
2882 source operand later. */
2883 before_strlen = get_last_insn ();
2885 create_output_operand (&ops[0], target, insn_mode);
2886 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2887 create_integer_operand (&ops[2], 0);
2888 create_integer_operand (&ops[3], align);
2889 if (!maybe_expand_insn (icode, 4, ops))
2890 return NULL_RTX;
2892 /* Check to see if the argument was declared attribute nonstring
2893 and if so, issue a warning since at this point it's not known
2894 to be nul-terminated. */
2895 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2897 /* Now that we are assured of success, expand the source. */
2898 start_sequence ();
2899 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2900 if (pat != src_reg)
2902 #ifdef POINTERS_EXTEND_UNSIGNED
2903 if (GET_MODE (pat) != Pmode)
2904 pat = convert_to_mode (Pmode, pat,
2905 POINTERS_EXTEND_UNSIGNED);
2906 #endif
2907 emit_move_insn (src_reg, pat);
2909 pat = get_insns ();
2910 end_sequence ();
2912 if (before_strlen)
2913 emit_insn_after (pat, before_strlen);
2914 else
2915 emit_insn_before (pat, get_insns ());
2917 /* Return the value in the proper mode for this function. */
2918 if (GET_MODE (ops[0].value) == target_mode)
2919 target = ops[0].value;
2920 else if (target != 0)
2921 convert_move (target, ops[0].value, 0);
2922 else
2923 target = convert_to_mode (target_mode, ops[0].value, 0);
2925 return target;
2928 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2929 bytes from constant string DATA + OFFSET and return it as target
2930 constant. */
2932 static rtx
2933 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2934 scalar_int_mode mode)
2936 const char *str = (const char *) data;
2938 gcc_assert (offset >= 0
2939 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2940 <= strlen (str) + 1));
2942 return c_readstr (str + offset, mode);
2945 /* LEN specifies the length of the block of the memcpy/memset operation.
2946 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2947 In some cases we can make a very likely guess at the maximum size,
2948 which we then set into PROBABLE_MAX_SIZE. */
2950 static void
2951 determine_block_size (tree len, rtx len_rtx,
2952 unsigned HOST_WIDE_INT *min_size,
2953 unsigned HOST_WIDE_INT *max_size,
2954 unsigned HOST_WIDE_INT *probable_max_size)
2956 if (CONST_INT_P (len_rtx))
2958 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2959 return;
2961 else
2963 wide_int min, max;
2964 enum value_range_type range_type = VR_UNDEFINED;
2966 /* Determine bounds from the type. */
2967 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2968 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2969 else
2970 *min_size = 0;
2971 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2972 *probable_max_size = *max_size
2973 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2974 else
2975 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2977 if (TREE_CODE (len) == SSA_NAME)
2978 range_type = get_range_info (len, &min, &max);
2979 if (range_type == VR_RANGE)
2981 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2982 *min_size = min.to_uhwi ();
2983 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2984 *probable_max_size = *max_size = max.to_uhwi ();
2986 else if (range_type == VR_ANTI_RANGE)
2988 /* An anti-range 0...N lets us determine the minimal size as N+1. */
2989 if (min == 0)
2991 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2992 *min_size = max.to_uhwi () + 1;
2994 /* Code like
2996 int n;
2997 if (n < 100)
2998 memcpy (a, b, n)
3000 produces an anti-range allowing negative values of N. We can
3001 still use that information and guess that N is not negative. */
3003 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3004 *probable_max_size = min.to_uhwi () - 1;
3007 gcc_checking_assert (*max_size <=
3008 (unsigned HOST_WIDE_INT)
3009 GET_MODE_MASK (GET_MODE (len_rtx)));
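/* Worked example (hypothetical buffers A and B): for

     void f (unsigned n) { if (n < 100) memcpy (a, b, n); }

   the VR_RANGE [0, 99] on N gives *MIN_SIZE == 0 and *MAX_SIZE ==
   *PROBABLE_MAX_SIZE == 99, letting the block-move expander pick a
   bounded inline sequence.  */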
3012 /* Try to verify that the sizes and lengths of the arguments to a string
3013 manipulation function given by EXP are within valid bounds and that
3014 the operation does not lead to buffer overflow or read past the end.
3015 Arguments other than EXP may be null. When non-null, the arguments
3016 have the following meaning:
3017 DST is the destination of a copy call or NULL otherwise.
3018 SRC is the source of a copy call or NULL otherwise.
3019 DSTWRITE is the number of bytes written into the destination obtained
3020 from the user-supplied size argument to the function (such as in
3021 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3022 MAXREAD is the user-supplied bound on the length of the source sequence
3023 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3024 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3025 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3026 expression EXP is a string function call (as opposed to a memory call
3027 like memcpy). As an exception, SRCSTR can also be an integer denoting
3028 the precomputed size of the source string or object (for functions like
3029 memcpy).
3030 DSTSIZE is the size of the destination object specified by the last
3031 argument to the _chk builtins, typically resulting from the expansion
3032 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3033 DSTSIZE)).
3035 When DSTWRITE is null, the source length is checked to verify that
3036 it doesn't exceed SIZE_MAX.
3038 If the call is successfully verified as safe return true, otherwise
3039 return false. */
3041 static bool
3042 check_access (tree exp, tree, tree, tree dstwrite,
3043 tree maxread, tree srcstr, tree dstsize)
3045 int opt = OPT_Wstringop_overflow_;
3047 /* The size of the largest object is half the address space, or
3048 PTRDIFF_MAX. (This is way too permissive.) */
3049 tree maxobjsize = max_object_size ();
3051 /* Either the length of the source string for string functions or
3052 the size of the source object for raw memory functions. */
3053 tree slen = NULL_TREE;
3055 tree range[2] = { NULL_TREE, NULL_TREE };
3057 /* Set to true when the exact number of bytes written by a string
3058 function like strcpy is not known and the only thing that is
3059 known is that it must be at least one (for the terminating nul). */
3060 bool at_least_one = false;
3061 if (srcstr)
3063 /* SRCSTR is normally a pointer to string but as a special case
3064 it can be an integer denoting the length of a string. */
3065 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3067 /* Try to determine the range of lengths the source string
3068 refers to. If it can be determined and is less than
3069 the upper bound given by MAXREAD add one to it for
3070 the terminating nul. Otherwise, set it to one for
3071 the same reason, or to MAXREAD as appropriate. */
3072 get_range_strlen (srcstr, range);
3073 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3075 if (maxread && tree_int_cst_le (maxread, range[0]))
3076 range[0] = range[1] = maxread;
3077 else
3078 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3079 range[0], size_one_node);
3081 if (maxread && tree_int_cst_le (maxread, range[1]))
3082 range[1] = maxread;
3083 else if (!integer_all_onesp (range[1]))
3084 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3085 range[1], size_one_node);
3087 slen = range[0];
3089 else
3091 at_least_one = true;
3092 slen = size_one_node;
3095 else
3096 slen = srcstr;
3099 if (!dstwrite && !maxread)
3101 /* When the only available piece of data is the object size
3102 there is nothing to do. */
3103 if (!slen)
3104 return true;
3106 /* Otherwise, when the length of the source sequence is known
3107 (as with strlen), set DSTWRITE to it. */
3108 if (!range[0])
3109 dstwrite = slen;
3112 if (!dstsize)
3113 dstsize = maxobjsize;
3115 if (dstwrite)
3116 get_size_range (dstwrite, range);
3118 tree func = get_callee_fndecl (exp);
3120 /* First check the number of bytes to be written against the maximum
3121 object size. */
3122 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3124 location_t loc = tree_nonartificial_location (exp);
3125 loc = expansion_point_location_if_in_system_header (loc);
3127 if (range[0] == range[1])
3128 warning_at (loc, opt,
3129 "%K%qD specified size %E "
3130 "exceeds maximum object size %E",
3131 exp, func, range[0], maxobjsize);
3132 else
3133 warning_at (loc, opt,
3134 "%K%qD specified size between %E and %E "
3135 "exceeds maximum object size %E",
3136 exp, func,
3137 range[0], range[1], maxobjsize);
3138 return false;
3141 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3142 constant, and in range of unsigned HOST_WIDE_INT. */
3143 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3145 /* Next check the number of bytes to be written against the destination
3146 object size. */
3147 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3149 if (range[0]
3150 && ((tree_fits_uhwi_p (dstsize)
3151 && tree_int_cst_lt (dstsize, range[0]))
3152 || (tree_fits_uhwi_p (dstwrite)
3153 && tree_int_cst_lt (dstwrite, range[0]))))
3155 if (TREE_NO_WARNING (exp))
3156 return false;
3158 location_t loc = tree_nonartificial_location (exp);
3159 loc = expansion_point_location_if_in_system_header (loc);
3161 if (dstwrite == slen && at_least_one)
3163 /* This is a call to strcpy with a destination of 0 size
3164 and a source of unknown length. The call will write
3165 at least one byte past the end of the destination. */
3166 warning_at (loc, opt,
3167 "%K%qD writing %E or more bytes into a region "
3168 "of size %E overflows the destination",
3169 exp, func, range[0], dstsize);
3171 else if (tree_int_cst_equal (range[0], range[1]))
3172 warning_n (loc, opt, tree_to_uhwi (range[0]),
3173 "%K%qD writing %E byte into a region "
3174 "of size %E overflows the destination",
3175 "%K%qD writing %E bytes into a region "
3176 "of size %E overflows the destination",
3177 exp, func, range[0], dstsize);
3178 else if (tree_int_cst_sign_bit (range[1]))
3180 /* Avoid printing the upper bound if it's invalid. */
3181 warning_at (loc, opt,
3182 "%K%qD writing %E or more bytes into a region "
3183 "of size %E overflows the destination",
3184 exp, func, range[0], dstsize);
3186 else
3187 warning_at (loc, opt,
3188 "%K%qD writing between %E and %E bytes into "
3189 "a region of size %E overflows the destination",
3190 exp, func, range[0], range[1],
3191 dstsize);
3193 /* Return error when an overflow has been detected. */
3194 return false;
3198 /* Check the maximum length of the source sequence against the size
3199 of the destination object if known, or against the maximum size
3200 of an object. */
3201 if (maxread)
3203 get_size_range (maxread, range);
3205 /* Use the lower end for MAXREAD from now on. */
3206 if (range[0])
3207 maxread = range[0];
3209 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3211 location_t loc = tree_nonartificial_location (exp);
3212 loc = expansion_point_location_if_in_system_header (loc);
3214 if (tree_int_cst_lt (maxobjsize, range[0]))
3216 if (TREE_NO_WARNING (exp))
3217 return false;
3219 /* Warn about crazy big sizes first since that's more
3220 likely to be meaningful than saying that the bound
3221 is greater than the object size if both are big. */
3222 if (range[0] == range[1])
3223 warning_at (loc, opt,
3224 "%K%qD specified bound %E "
3225 "exceeds maximum object size %E",
3226 exp, func,
3227 range[0], maxobjsize);
3228 else
3229 warning_at (loc, opt,
3230 "%K%qD specified bound between %E and %E "
3231 "exceeds maximum object size %E",
3232 exp, func,
3233 range[0], range[1], maxobjsize);
3235 return false;
3238 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3240 if (TREE_NO_WARNING (exp))
3241 return false;
3243 if (tree_int_cst_equal (range[0], range[1]))
3244 warning_at (loc, opt,
3245 "%K%qD specified bound %E "
3246 "exceeds destination size %E",
3247 exp, func,
3248 range[0], dstsize);
3249 else
3250 warning_at (loc, opt,
3251 "%K%qD specified bound between %E and %E "
3252 "exceeds destination size %E",
3253 exp, func,
3254 range[0], range[1], dstsize);
3255 return false;
3260 /* Check for reading past the end of SRC. */
3261 if (slen
3262 && slen == srcstr
3263 && dstwrite && range[0]
3264 && tree_int_cst_lt (slen, range[0]))
3266 if (TREE_NO_WARNING (exp))
3267 return false;
3269 location_t loc = tree_nonartificial_location (exp);
3271 if (tree_int_cst_equal (range[0], range[1]))
3272 warning_n (loc, opt, tree_to_uhwi (range[0]),
3273 "%K%qD reading %E byte from a region of size %E",
3274 "%K%qD reading %E bytes from a region of size %E",
3275 exp, func, range[0], slen);
3276 else if (tree_int_cst_sign_bit (range[1]))
3278 /* Avoid printing the upper bound if it's invalid. */
3279 warning_at (loc, opt,
3280 "%K%qD reading %E or more bytes from a region "
3281 "of size %E",
3282 exp, func, range[0], slen);
3284 else
3285 warning_at (loc, opt,
3286 "%K%qD reading between %E and %E bytes from a region "
3287 "of size %E",
3288 exp, func, range[0], range[1], slen);
3289 return false;
3292 return true;
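/* Example of a call this function diagnoses (illustrative):

     char d[4];
     strcpy (d, "abcde");

   Here the source length range is [6, 6] including the terminating nul
   and DSTSIZE is 4, so the "writing 6 bytes into a region of size 4"
   form of the -Wstringop-overflow= warning above fires and false is
   returned.  */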
3295 /* Helper to compute the size of the object referenced by the DEST
3296 expression which must have pointer type, using Object Size type
3297 OSTYPE (only the least significant 2 bits are used). Return
3298 an estimate of the size of the object if successful or NULL when
3299 the size cannot be determined. When the referenced object involves
3300 a non-constant offset in some range the returned value represents
3301 the largest size given the smallest non-negative offset in the
3302 range. The function is intended for diagnostics and should not
3303 be used to influence code generation or optimization. */
3305 tree
3306 compute_objsize (tree dest, int ostype)
3308 unsigned HOST_WIDE_INT size;
3310 /* Only the two least significant bits are meaningful. */
3311 ostype &= 3;
3313 if (compute_builtin_object_size (dest, ostype, &size))
3314 return build_int_cst (sizetype, size);
3316 if (TREE_CODE (dest) == SSA_NAME)
3318 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3319 if (!is_gimple_assign (stmt))
3320 return NULL_TREE;
3322 dest = gimple_assign_rhs1 (stmt);
3324 tree_code code = gimple_assign_rhs_code (stmt);
3325 if (code == POINTER_PLUS_EXPR)
3327 /* compute_builtin_object_size fails for addresses with
3328 non-constant offsets. Try to determine the range of
3329 such an offset here and use it to adjust the constant
3330 size. */
3331 tree off = gimple_assign_rhs2 (stmt);
3332 if (TREE_CODE (off) == INTEGER_CST)
3334 if (tree size = compute_objsize (dest, ostype))
3336 wide_int wioff = wi::to_wide (off);
3337 wide_int wisiz = wi::to_wide (size);
3339 /* Ignore negative offsets for now. For others,
3340 use the lower bound as the most optimistic
3341 estimate of the (remaining) size. */
3342 if (wi::sign_mask (wioff))
3343 ;
3344 else if (wi::ltu_p (wioff, wisiz))
3345 return wide_int_to_tree (TREE_TYPE (size),
3346 wi::sub (wisiz, wioff));
3347 else
3348 return size_zero_node;
3351 else if (TREE_CODE (off) == SSA_NAME
3352 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3354 wide_int min, max;
3355 enum value_range_type rng = get_range_info (off, &min, &max);
3357 if (rng == VR_RANGE)
3359 if (tree size = compute_objsize (dest, ostype))
3361 wide_int wisiz = wi::to_wide (size);
3363 /* Ignore negative offsets for now. For others,
3364 use the lower bound as the most optimistic
3365 estimate of the (remaining) size. */
3366 if (wi::sign_mask (min))
3367 ;
3368 else if (wi::ltu_p (min, wisiz))
3369 return wide_int_to_tree (TREE_TYPE (size),
3370 wi::sub (wisiz, min));
3371 else
3372 return size_zero_node;
3377 else if (code != ADDR_EXPR)
3378 return NULL_TREE;
3381 /* Unless computing the largest size (for memcpy and other raw memory
3382 functions), try to determine the size of the object from its type. */
3383 if (!ostype)
3384 return NULL_TREE;
3386 if (TREE_CODE (dest) != ADDR_EXPR)
3387 return NULL_TREE;
3389 tree type = TREE_TYPE (dest);
3390 if (TREE_CODE (type) == POINTER_TYPE)
3391 type = TREE_TYPE (type);
3393 type = TYPE_MAIN_VARIANT (type);
3395 if (TREE_CODE (type) == ARRAY_TYPE
3396 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3398 /* Return the constant size unless it's zero (that's a zero-length
3399 array likely at the end of a struct). */
3400 tree size = TYPE_SIZE_UNIT (type);
3401 if (size && TREE_CODE (size) == INTEGER_CST
3402 && !integer_zerop (size))
3403 return size;
3406 return NULL_TREE;
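/* Example (illustrative): given char a[7], compute_objsize on &a[0] with
   OSTYPE 1 yields 7, and on a pointer a + i where i is known to lie in
   [2, 5] it yields 5, i.e. the smallest non-negative offset is used as
   the most optimistic estimate of the remaining size.  */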
3409 /* Helper to determine and check the sizes of the source and the destination
3410 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3411 call expression, DEST is the destination argument, SRC is the source
3412 argument or null, and LEN is the number of bytes. Use Object Size type-0
3413 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3414 (no overflow or invalid sizes), false otherwise. */
3416 static bool
3417 check_memop_access (tree exp, tree dest, tree src, tree size)
3419 /* For functions like memset and memcpy that operate on raw memory
3420 try to determine the size of the largest source and destination
3421 object using type-0 Object Size regardless of the object size
3422 type specified by the option. */
3423 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3424 tree dstsize = compute_objsize (dest, 0);
3426 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3427 srcsize, dstsize);
3430 /* Validate memchr arguments without performing any expansion.
3431 Return NULL_RTX. */
3433 static rtx
3434 expand_builtin_memchr (tree exp, rtx)
3436 if (!validate_arglist (exp,
3437 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3438 return NULL_RTX;
3440 tree arg1 = CALL_EXPR_ARG (exp, 0);
3441 tree len = CALL_EXPR_ARG (exp, 2);
3443 /* Diagnose calls where the specified length exceeds the size
3444 of the object. */
3445 if (warn_stringop_overflow)
3447 tree size = compute_objsize (arg1, 0);
3448 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3449 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3452 return NULL_RTX;
3455 /* Expand a call EXP to the memcpy builtin.
3456 Return NULL_RTX if we failed, the caller should emit a normal call,
3457 otherwise try to get the result in TARGET, if convenient (and in
3458 mode MODE if that's convenient). */
3460 static rtx
3461 expand_builtin_memcpy (tree exp, rtx target)
3463 if (!validate_arglist (exp,
3464 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3465 return NULL_RTX;
3467 tree dest = CALL_EXPR_ARG (exp, 0);
3468 tree src = CALL_EXPR_ARG (exp, 1);
3469 tree len = CALL_EXPR_ARG (exp, 2);
3471 check_memop_access (exp, dest, src, len);
3473 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3474 /*endp=*/ 0);
3477 /* Check a call EXP to the memmove built-in for validity.
3478 Return NULL_RTX on both success and failure. */
3480 static rtx
3481 expand_builtin_memmove (tree exp, rtx)
3483 if (!validate_arglist (exp,
3484 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3485 return NULL_RTX;
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 1);
3489 tree len = CALL_EXPR_ARG (exp, 2);
3491 check_memop_access (exp, dest, src, len);
3493 return NULL_RTX;
3496 /* Expand a call EXP to the mempcpy builtin.
3497 Return NULL_RTX if we failed; the caller should emit a normal call,
3498 otherwise try to get the result in TARGET, if convenient (and in
3499 mode MODE if that's convenient). If ENDP is 0 return the
3500 destination pointer, if ENDP is 1 return the end pointer ala
3501 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3502 stpcpy. */
3504 static rtx
3505 expand_builtin_mempcpy (tree exp, rtx target)
3507 if (!validate_arglist (exp,
3508 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3509 return NULL_RTX;
3511 tree dest = CALL_EXPR_ARG (exp, 0);
3512 tree src = CALL_EXPR_ARG (exp, 1);
3513 tree len = CALL_EXPR_ARG (exp, 2);
3515 /* Policy does not generally allow using compute_objsize (which
3516 is used internally by check_memop_access) to change code generation
3517 or drive optimization decisions.
3519 In this instance it is safe because the code we generate has
3520 the same semantics regardless of the return value of
3521 check_memop_access. Exactly the same amount of data is copied
3522 and the return value is exactly the same in both cases.
3524 Furthermore, check_memop_access always uses mode 0 for the call to
3525 compute_objsize, so the imprecise nature of compute_objsize is
3526 avoided. */
3528 /* Avoid expanding mempcpy into memcpy when the call is determined
3529 to overflow the buffer. This also prevents the same overflow
3530 from being diagnosed again when expanding memcpy. */
3531 if (!check_memop_access (exp, dest, src, len))
3532 return NULL_RTX;
3534 return expand_builtin_mempcpy_args (dest, src, len,
3535 target, exp, /*endp=*/ 1);
3538 /* Helper function to do the actual work of expanding the memory copy
3539 family of functions (memcpy, mempcpy, stpcpy). Expansion should assign LEN bytes
3540 of memory from SRC to DEST and assign to TARGET if convenient.
3541 If ENDP is 0 return the
3542 destination pointer, if ENDP is 1 return the end pointer ala
3543 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3544 stpcpy. */
3546 static rtx
3547 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3548 rtx target, tree exp, int endp)
3550 const char *src_str;
3551 unsigned int src_align = get_pointer_alignment (src);
3552 unsigned int dest_align = get_pointer_alignment (dest);
3553 rtx dest_mem, src_mem, dest_addr, len_rtx;
3554 HOST_WIDE_INT expected_size = -1;
3555 unsigned int expected_align = 0;
3556 unsigned HOST_WIDE_INT min_size;
3557 unsigned HOST_WIDE_INT max_size;
3558 unsigned HOST_WIDE_INT probable_max_size;
3560 /* If DEST is not a pointer type, call the normal function. */
3561 if (dest_align == 0)
3562 return NULL_RTX;
3564 /* If SRC is not a pointer type, don't do this
3565 operation in-line. */
3566 if (src_align == 0)
3567 return NULL_RTX;
3569 if (currently_expanding_gimple_stmt)
3570 stringop_block_profile (currently_expanding_gimple_stmt,
3571 &expected_align, &expected_size);
3573 if (expected_align < dest_align)
3574 expected_align = dest_align;
3575 dest_mem = get_memory_rtx (dest, len);
3576 set_mem_align (dest_mem, dest_align);
3577 len_rtx = expand_normal (len);
3578 determine_block_size (len, len_rtx, &min_size, &max_size,
3579 &probable_max_size);
3580 src_str = c_getstr (src);
3582 /* If SRC is a string constant and block move would be done
3583 by pieces, we can avoid loading the string from memory
3584 and only store the computed constants. */
3585 if (src_str
3586 && CONST_INT_P (len_rtx)
3587 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3588 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3589 CONST_CAST (char *, src_str),
3590 dest_align, false))
3592 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3593 builtin_memcpy_read_str,
3594 CONST_CAST (char *, src_str),
3595 dest_align, false, endp);
3596 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3597 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3598 return dest_mem;
3601 src_mem = get_memory_rtx (src, len);
3602 set_mem_align (src_mem, src_align);
3604 /* Copy word part most expediently. */
3605 enum block_op_methods method = BLOCK_OP_NORMAL;
3606 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3607 method = BLOCK_OP_TAILCALL;
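/* For mempcpy with a live result, falling back to a memcpy libcall
   would return DEST rather than the end pointer, so ask that no
   libcall be emitted; if inlining fails we punt to a mempcpy call. */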
3608 if (endp == 1 && target != const0_rtx)
3609 method = BLOCK_OP_NO_LIBCALL_RET;
3610 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3611 expected_align, expected_size,
3612 min_size, max_size, probable_max_size);
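/* With BLOCK_OP_NO_LIBCALL_RET, emit_block_move_hints returns pc_rtx
   to signal that only a library call would do; punt so the caller
   emits the call itself. */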
3613 if (dest_addr == pc_rtx)
3614 return NULL_RTX;
3616 if (dest_addr == 0)
3618 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3619 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3622 if (endp && target != const0_rtx)
3624 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3625 /* stpcpy pointer to last byte. */
3626 if (endp == 2)
3627 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3630 return dest_addr;
3633 static rtx
3634 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3635 rtx target, tree orig_exp, int endp)
3637 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3638 endp);
3641 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3642 we failed; the caller should emit a normal call, otherwise try to
3643 get the result in TARGET, if convenient. If ENDP is 0 return the
3644 destination pointer, if ENDP is 1 return the end pointer ala
3645 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3646 stpcpy. */
3648 static rtx
3649 expand_movstr (tree dest, tree src, rtx target, int endp)
3651 struct expand_operand ops[3];
3652 rtx dest_mem;
3653 rtx src_mem;
3655 if (!targetm.have_movstr ())
3656 return NULL_RTX;
3658 dest_mem = get_memory_rtx (dest, NULL);
3659 src_mem = get_memory_rtx (src, NULL);
3660 if (!endp)
3662 target = force_reg (Pmode, XEXP (dest_mem, 0));
3663 dest_mem = replace_equiv_address (dest_mem, target);
3666 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3667 create_fixed_operand (&ops[1], dest_mem);
3668 create_fixed_operand (&ops[2], src_mem);
3669 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3670 return NULL_RTX;
3672 if (endp && target != const0_rtx)
3674 target = ops[0].value;
3675 /* movstr is supposed to set end to the address of the NUL
3676 terminator. If the caller requested a mempcpy-like return value,
3677 adjust it. */
3678 if (endp == 1)
3680 rtx tem = plus_constant (GET_MODE (target),
3681 gen_lowpart (GET_MODE (target), target), 1);
3682 emit_move_insn (target, force_operand (tem, NULL_RTX));
3685 return target;
3688 /* Do some very basic size validation of a call to the strcat builtin
3689 given by EXP. Return NULL_RTX to have the built-in expand to a call
3690 to the library function. */
3692 static rtx
3693 expand_builtin_strcat (tree exp, rtx)
3695 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3696 || !warn_stringop_overflow)
3697 return NULL_RTX;
3699 tree dest = CALL_EXPR_ARG (exp, 0);
3700 tree src = CALL_EXPR_ARG (exp, 1);
3702 /* There is no way here to determine the length of the string in
3703 the destination to which the SRC string is being appended so
3704 just diagnose cases when the source string is longer than
3705 the destination object. */
3707 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3709 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3710 destsize);
3712 return NULL_RTX;
3715 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3716 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3717 try to get the result in TARGET, if convenient (and in mode MODE if that's
3718 convenient). */
3720 static rtx
3721 expand_builtin_strcpy (tree exp, rtx target)
3723 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3724 return NULL_RTX;
3726 tree dest = CALL_EXPR_ARG (exp, 0);
3727 tree src = CALL_EXPR_ARG (exp, 1);
3729 if (warn_stringop_overflow)
3731 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3732 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3733 src, destsize);
3736 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3738 /* Check to see if the argument was declared attribute nonstring
3739 and if so, issue a warning since at this point it's not known
3740 to be nul-terminated. */
3741 tree fndecl = get_callee_fndecl (exp);
3742 maybe_warn_nonstring_arg (fndecl, exp);
3743 return ret;
3746 return NULL_RTX;
3749 /* Helper function to do the actual work for expand_builtin_strcpy. The
3750 arguments to the builtin_strcpy call DEST and SRC are broken out
3751 so that this can also be called without constructing an actual CALL_EXPR.
3752 The other arguments and return value are the same as for
3753 expand_builtin_strcpy. */
3755 static rtx
3756 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3758 return expand_movstr (dest, src, target, /*endp=*/0);
3761 /* Expand a call EXP to the stpcpy builtin.
3762 Return NULL_RTX if we failed; the caller should emit a normal call,
3763 otherwise try to get the result in TARGET, if convenient (and in
3764 mode MODE if that's convenient). */
3766 static rtx
3767 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3769 tree dst, src;
3770 location_t loc = EXPR_LOCATION (exp);
3772 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3773 return NULL_RTX;
3775 dst = CALL_EXPR_ARG (exp, 0);
3776 src = CALL_EXPR_ARG (exp, 1);
3778 if (warn_stringop_overflow)
3780 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3781 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3782 src, destsize);
3785 /* If return value is ignored, transform stpcpy into strcpy. */
3786 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3788 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3789 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3790 return expand_expr (result, target, mode, EXPAND_NORMAL);
3792 else
3794 tree len, lenp1;
3795 rtx ret;
3797 /* Ensure we get an actual string whose length can be evaluated at
3798 compile-time, not an expression containing a string. This is
3799 because the latter will potentially produce pessimized code
3800 when used to produce the return value. */
3801 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3802 return expand_movstr (dst, src, target, /*endp=*/2);
3804 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3805 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3806 target, exp, /*endp=*/2);
3808 if (ret)
3809 return ret;
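/* Expansion as mempcpy failed; if the length is a compile-time
   constant, expand as strcpy instead and form the stpcpy result as
   DST + LEN (the address of the terminating nul) by hand below. */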
3811 if (TREE_CODE (len) == INTEGER_CST)
3813 rtx len_rtx = expand_normal (len);
3815 if (CONST_INT_P (len_rtx))
3817 ret = expand_builtin_strcpy_args (dst, src, target);
3819 if (ret)
3821 if (! target)
3823 if (mode != VOIDmode)
3824 target = gen_reg_rtx (mode);
3825 else
3826 target = gen_reg_rtx (GET_MODE (ret));
3828 if (GET_MODE (target) != GET_MODE (ret))
3829 ret = gen_lowpart (GET_MODE (target), ret);
3831 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3832 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3833 gcc_assert (ret);
3835 return target;
3840 return expand_movstr (dst, src, target, /*endp=*/2);
3844 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3845 arguments while being careful to avoid duplicate warnings (which could
3846 be issued if the expander were to expand the call, resulting in it
3847 being emitted in expand_call ()). */
3849 static rtx
3850 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3852 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3854 /* The call has been successfully expanded. Check for nonstring
3855 arguments and issue warnings as appropriate. */
3856 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3857 return ret;
3860 return NULL_RTX;
3863 /* Check a call EXP to the stpncpy built-in for validity.
3864 Return NULL_RTX on both success and failure. */
3866 static rtx
3867 expand_builtin_stpncpy (tree exp, rtx)
3869 if (!validate_arglist (exp,
3870 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3871 || !warn_stringop_overflow)
3872 return NULL_RTX;
3874 /* The source and destination of the call. */
3875 tree dest = CALL_EXPR_ARG (exp, 0);
3876 tree src = CALL_EXPR_ARG (exp, 1);
3878 /* The exact number of bytes to write (not the maximum). */
3879 tree len = CALL_EXPR_ARG (exp, 2);
3881 /* The size of the destination object. */
3882 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3884 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3886 return NULL_RTX;
3889 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3890 bytes from constant string DATA + OFFSET and return it as target
3891 constant. */
3893 static rtx
3894 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3895 scalar_int_mode mode)
3897 const char *str = (const char *) data;
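/* Bytes past the end of STR read as zeros, matching strncpy's
   requirement to pad the destination with nuls. */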
3899 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3900 return const0_rtx;
3902 return c_readstr (str + offset, mode);
3905 /* Helper to check the sizes of sequences and the destination of calls
3906 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3907 success (no overflow or invalid sizes), false otherwise. */
3909 static bool
3910 check_strncat_sizes (tree exp, tree objsize)
3912 tree dest = CALL_EXPR_ARG (exp, 0);
3913 tree src = CALL_EXPR_ARG (exp, 1);
3914 tree maxread = CALL_EXPR_ARG (exp, 2);
3916 /* Try to determine the range of lengths that the source expression
3917 refers to. */
3918 tree lenrange[2];
3919 get_range_strlen (src, lenrange);
3921 /* Try to verify that the destination is big enough for the shortest
3922 string. */
3924 if (!objsize && warn_stringop_overflow)
3926 /* If it hasn't been provided by __strncat_chk, try to determine
3927 the size of the destination object into which the source is
3928 being copied. */
3929 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3932 /* Add one for the terminating nul. */
3933 tree srclen = (lenrange[0]
3934 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3935 size_one_node)
3936 : NULL_TREE);
3938 /* The strncat function copies at most MAXREAD bytes and always appends
3939 the terminating nul so the specified upper bound should never be equal
3940 to (or greater than) the size of the destination. */
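/* A typical instance of this mistake is
   strncat (d, s, sizeof d)
   where the bound must instead leave room for the existing contents
   and the terminating nul, e.g. sizeof d - strlen (d) - 1. */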
3941 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3942 && tree_int_cst_equal (objsize, maxread))
3944 location_t loc = tree_nonartificial_location (exp);
3945 loc = expansion_point_location_if_in_system_header (loc);
3947 warning_at (loc, OPT_Wstringop_overflow_,
3948 "%K%qD specified bound %E equals destination size",
3949 exp, get_callee_fndecl (exp), maxread);
3951 return false;
3954 if (!srclen
3955 || (maxread && tree_fits_uhwi_p (maxread)
3956 && tree_fits_uhwi_p (srclen)
3957 && tree_int_cst_lt (maxread, srclen)))
3958 srclen = maxread;
3960 /* The number of bytes to write is LEN but check_access will also
3961 check SRCLEN if LEN's value isn't known. */
3962 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3963 objsize);
3966 /* Similar to expand_builtin_strcat, do some very basic size validation
3967 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3968 the built-in expand to a call to the library function. */
3970 static rtx
3971 expand_builtin_strncat (tree exp, rtx)
3973 if (!validate_arglist (exp,
3974 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3975 || !warn_stringop_overflow)
3976 return NULL_RTX;
3978 tree dest = CALL_EXPR_ARG (exp, 0);
3979 tree src = CALL_EXPR_ARG (exp, 1);
3980 /* The upper bound on the number of bytes to write. */
3981 tree maxread = CALL_EXPR_ARG (exp, 2);
3982 /* The length of the source sequence. */
3983 tree slen = c_strlen (src, 1);
3985 /* Try to determine the range of lengths that the source expression
3986 refers to. */
3987 tree lenrange[2];
3988 if (slen)
3989 lenrange[0] = lenrange[1] = slen;
3990 else
3991 get_range_strlen (src, lenrange);
3993 /* Try to verify that the destination is big enough for the shortest
3994 string. First try to determine the size of the destination object
3995 into which the source is being copied. */
3996 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3998 /* Add one for the terminating nul. */
3999 tree srclen = (lenrange[0]
4000 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4001 size_one_node)
4002 : NULL_TREE);
4004 /* The strncat function copies at most MAXREAD bytes and always appends
4005 the terminating nul so the specified upper bound should never be equal
4006 to (or greater than) the size of the destination. */
4007 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4008 && tree_int_cst_equal (destsize, maxread))
4010 location_t loc = tree_nonartificial_location (exp);
4011 loc = expansion_point_location_if_in_system_header (loc);
4013 warning_at (loc, OPT_Wstringop_overflow_,
4014 "%K%qD specified bound %E equals destination size",
4015 exp, get_callee_fndecl (exp), maxread);
4017 return NULL_RTX;
4020 if (!srclen
4021 || (maxread && tree_fits_uhwi_p (maxread)
4022 && tree_fits_uhwi_p (srclen)
4023 && tree_int_cst_lt (maxread, srclen)))
4024 srclen = maxread;
4026 /* The number of bytes to write is SRCLEN. */
4027 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4029 return NULL_RTX;
4032 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4033 NULL_RTX if we failed; the caller should emit a normal call. */
4035 static rtx
4036 expand_builtin_strncpy (tree exp, rtx target)
4038 location_t loc = EXPR_LOCATION (exp);
4040 if (validate_arglist (exp,
4041 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4043 tree dest = CALL_EXPR_ARG (exp, 0);
4044 tree src = CALL_EXPR_ARG (exp, 1);
4045 /* The number of bytes to write (not the maximum). */
4046 tree len = CALL_EXPR_ARG (exp, 2);
4047 /* The length of the source sequence. */
4048 tree slen = c_strlen (src, 1);
4050 if (warn_stringop_overflow)
4052 tree destsize = compute_objsize (dest,
4053 warn_stringop_overflow - 1);
4055 /* The number of bytes to write is LEN but check_access will also
4056 check SLEN if LEN's value isn't known. */
4057 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4058 destsize);
4061 /* We must be passed constant LEN and SRC parameters. */
4062 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4063 return NULL_RTX;
4065 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4067 /* We're required to pad with trailing zeros if the requested
4068 len is greater than strlen(s2)+1. In that case try to
4069 use store_by_pieces; if it fails, punt. */
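/* For example, strncpy (d, "ab", 8) must store "ab" followed by six
   nul bytes. */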
4070 if (tree_int_cst_lt (slen, len))
4072 unsigned int dest_align = get_pointer_alignment (dest);
4073 const char *p = c_getstr (src);
4074 rtx dest_mem;
4076 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4077 || !can_store_by_pieces (tree_to_uhwi (len),
4078 builtin_strncpy_read_str,
4079 CONST_CAST (char *, p),
4080 dest_align, false))
4081 return NULL_RTX;
4083 dest_mem = get_memory_rtx (dest, len);
4084 store_by_pieces (dest_mem, tree_to_uhwi (len),
4085 builtin_strncpy_read_str,
4086 CONST_CAST (char *, p), dest_align, false, 0);
4087 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4088 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4089 return dest_mem;
4092 return NULL_RTX;
4095 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4096 bytes from constant string DATA + OFFSET and return it as target
4097 constant. */
4099 static rtx
4100 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4101 scalar_int_mode mode)
4103 const char *c = (const char *) data;
4104 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4106 memset (p, *c, GET_MODE_SIZE (mode));
4108 return c_readstr (p, mode);
4111 /* Callback routine for store_by_pieces. Return the RTL of a register
4112 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4113 char value given in the RTL register data. For example, if mode is
4114 4 bytes wide, return the RTL for 0x01010101*data. */
4116 static rtx
4117 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4118 scalar_int_mode mode)
4120 rtx target, coeff;
4121 size_t size;
4122 char *p;
4124 size = GET_MODE_SIZE (mode);
4125 if (size == 1)
4126 return (rtx) data;
4128 p = XALLOCAVEC (char, size);
4129 memset (p, 1, size);
4130 coeff = c_readstr (p, mode);
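/* COEFF is now 0x0101...01 in MODE; multiplying the zero-extended
   byte value by it replicates the byte into every byte of the word. */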
4132 target = convert_to_mode (mode, (rtx) data, 1);
4133 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4134 return force_reg (mode, target);
4137 /* Expand expression EXP, which is a call to the memset builtin. Return
4138 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4139 try to get the result in TARGET, if convenient (and in mode MODE if that's
4140 convenient). */
4142 static rtx
4143 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4145 if (!validate_arglist (exp,
4146 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4147 return NULL_RTX;
4149 tree dest = CALL_EXPR_ARG (exp, 0);
4150 tree val = CALL_EXPR_ARG (exp, 1);
4151 tree len = CALL_EXPR_ARG (exp, 2);
4153 check_memop_access (exp, dest, NULL_TREE, len);
4155 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4158 /* Helper function to do the actual work for expand_builtin_memset. The
4159 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4160 so that this can also be called without constructing an actual CALL_EXPR.
4161 The other arguments and return value are the same as for
4162 expand_builtin_memset. */
4164 static rtx
4165 expand_builtin_memset_args (tree dest, tree val, tree len,
4166 rtx target, machine_mode mode, tree orig_exp)
4168 tree fndecl, fn;
4169 enum built_in_function fcode;
4170 machine_mode val_mode;
4171 char c;
4172 unsigned int dest_align;
4173 rtx dest_mem, dest_addr, len_rtx;
4174 HOST_WIDE_INT expected_size = -1;
4175 unsigned int expected_align = 0;
4176 unsigned HOST_WIDE_INT min_size;
4177 unsigned HOST_WIDE_INT max_size;
4178 unsigned HOST_WIDE_INT probable_max_size;
4180 dest_align = get_pointer_alignment (dest);
4182 /* If DEST is not a pointer type, don't do this operation in-line. */
4183 if (dest_align == 0)
4184 return NULL_RTX;
4186 if (currently_expanding_gimple_stmt)
4187 stringop_block_profile (currently_expanding_gimple_stmt,
4188 &expected_align, &expected_size);
4190 if (expected_align < dest_align)
4191 expected_align = dest_align;
4193 /* If the LEN parameter is zero, return DEST. */
4194 if (integer_zerop (len))
4196 /* Evaluate and ignore VAL in case it has side-effects. */
4197 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4198 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4201 /* Stabilize the arguments in case we fail. */
4202 dest = builtin_save_expr (dest);
4203 val = builtin_save_expr (val);
4204 len = builtin_save_expr (len);
4206 len_rtx = expand_normal (len);
4207 determine_block_size (len, len_rtx, &min_size, &max_size,
4208 &probable_max_size);
4209 dest_mem = get_memory_rtx (dest, len);
4210 val_mode = TYPE_MODE (unsigned_char_type_node);
4212 if (TREE_CODE (val) != INTEGER_CST)
4214 rtx val_rtx;
4216 val_rtx = expand_normal (val);
4217 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4219 /* Assume that we can memset by pieces if we can store
4220 the coefficients by pieces (in the required modes).
4221 We can't pass builtin_memset_gen_str as that emits RTL. */
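/* Use an arbitrary nonzero byte just to query feasibility; the actual
   stores are emitted by builtin_memset_gen_str below. */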
4222 c = 1;
4223 if (tree_fits_uhwi_p (len)
4224 && can_store_by_pieces (tree_to_uhwi (len),
4225 builtin_memset_read_str, &c, dest_align,
4226 true))
4228 val_rtx = force_reg (val_mode, val_rtx);
4229 store_by_pieces (dest_mem, tree_to_uhwi (len),
4230 builtin_memset_gen_str, val_rtx, dest_align,
4231 true, 0);
4233 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4234 dest_align, expected_align,
4235 expected_size, min_size, max_size,
4236 probable_max_size))
4237 goto do_libcall;
4239 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4240 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4241 return dest_mem;
4244 if (target_char_cast (val, &c))
4245 goto do_libcall;
4247 if (c)
4249 if (tree_fits_uhwi_p (len)
4250 && can_store_by_pieces (tree_to_uhwi (len),
4251 builtin_memset_read_str, &c, dest_align,
4252 true))
4253 store_by_pieces (dest_mem, tree_to_uhwi (len),
4254 builtin_memset_read_str, &c, dest_align, true, 0);
4255 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4256 gen_int_mode (c, val_mode),
4257 dest_align, expected_align,
4258 expected_size, min_size, max_size,
4259 probable_max_size))
4260 goto do_libcall;
4262 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4263 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4264 return dest_mem;
4267 set_mem_align (dest_mem, dest_align);
4268 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4269 CALL_EXPR_TAILCALL (orig_exp)
4270 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4271 expected_align, expected_size,
4272 min_size, max_size,
4273 probable_max_size);
4275 if (dest_addr == 0)
4277 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4278 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4281 return dest_addr;
4283 do_libcall:
4284 fndecl = get_callee_fndecl (orig_exp);
4285 fcode = DECL_FUNCTION_CODE (fndecl);
4286 if (fcode == BUILT_IN_MEMSET)
4287 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4288 dest, val, len);
4289 else if (fcode == BUILT_IN_BZERO)
4290 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4291 dest, len);
4292 else
4293 gcc_unreachable ();
4294 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4295 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4296 return expand_call (fn, target, target == const0_rtx);
4299 /* Expand expression EXP, which is a call to the bzero builtin. Return
4300 NULL_RTX if we failed; the caller should emit a normal call. */
4302 static rtx
4303 expand_builtin_bzero (tree exp)
4305 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4306 return NULL_RTX;
4308 tree dest = CALL_EXPR_ARG (exp, 0);
4309 tree size = CALL_EXPR_ARG (exp, 1);
4311 check_memop_access (exp, dest, NULL_TREE, size);
4313 /* New argument list transforming bzero(ptr x, int y) to
4314 memset(ptr x, int 0, size_t y). This is done this way
4315 so that if it isn't expanded inline, we fall back to
4316 calling bzero instead of memset. */
4318 location_t loc = EXPR_LOCATION (exp);
4320 return expand_builtin_memset_args (dest, integer_zero_node,
4321 fold_convert_loc (loc,
4322 size_type_node, size),
4323 const0_rtx, VOIDmode, exp);
4326 /* Try to expand cmpstr operation ICODE with the given operands.
4327 Return the result rtx on success, otherwise return null. */
4329 static rtx
4330 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4331 HOST_WIDE_INT align)
4333 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4335 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4336 target = NULL_RTX;
4338 struct expand_operand ops[4];
4339 create_output_operand (&ops[0], target, insn_mode);
4340 create_fixed_operand (&ops[1], arg1_rtx);
4341 create_fixed_operand (&ops[2], arg2_rtx);
4342 create_integer_operand (&ops[3], align);
4343 if (maybe_expand_insn (icode, 4, ops))
4344 return ops[0].value;
4345 return NULL_RTX;
4348 /* Expand expression EXP, which is a call to the memcmp built-in function.
4349 Return NULL_RTX if we failed and the caller should emit a normal call,
4350 otherwise try to get the result in TARGET, if convenient.
4351 RESULT_EQ is true if we can relax the returned value to be either zero
4352 or nonzero, without caring about the sign. */
4354 static rtx
4355 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4357 if (!validate_arglist (exp,
4358 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4359 return NULL_RTX;
4361 tree arg1 = CALL_EXPR_ARG (exp, 0);
4362 tree arg2 = CALL_EXPR_ARG (exp, 1);
4363 tree len = CALL_EXPR_ARG (exp, 2);
4365 /* Diagnose calls where the specified length exceeds the size of either
4366 object. */
4367 if (warn_stringop_overflow)
4369 tree size = compute_objsize (arg1, 0);
4370 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4371 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4373 size = compute_objsize (arg2, 0);
4374 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4375 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4379 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4380 location_t loc = EXPR_LOCATION (exp);
4382 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4383 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4385 /* If we don't have POINTER_TYPE, call the function. */
4386 if (arg1_align == 0 || arg2_align == 0)
4387 return NULL_RTX;
4389 rtx arg1_rtx = get_memory_rtx (arg1, len);
4390 rtx arg2_rtx = get_memory_rtx (arg2, len);
4391 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4393 /* Set MEM_SIZE as appropriate. */
4394 if (CONST_INT_P (len_rtx))
4396 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4397 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4400 by_pieces_constfn constfn = NULL;
4402 const char *src_str = c_getstr (arg2);
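/* For an equality test the operand order doesn't matter, so if ARG2
   is not a constant string, try ARG1 and swap the operand rtxes. */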
4403 if (result_eq && src_str == NULL)
4405 src_str = c_getstr (arg1);
4406 if (src_str != NULL)
4407 std::swap (arg1_rtx, arg2_rtx);
4410 /* If SRC is a string constant and the block comparison would be
4411 done by pieces, we can avoid loading the string from memory
4412 and compare directly against the computed constants. */
4413 if (src_str
4414 && CONST_INT_P (len_rtx)
4415 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4416 constfn = builtin_memcpy_read_str;
4418 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4419 TREE_TYPE (len), target,
4420 result_eq, constfn,
4421 CONST_CAST (char *, src_str));
4423 if (result)
4425 /* Return the value in the proper mode for this function. */
4426 if (GET_MODE (result) == mode)
4427 return result;
4429 if (target != 0)
4431 convert_move (target, result, 0);
4432 return target;
4435 return convert_to_mode (mode, result, 0);
4438 return NULL_RTX;
4441 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4442 if we failed; the caller should emit a normal call, otherwise try to get
4443 the result in TARGET, if convenient. */
4445 static rtx
4446 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4448 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4449 return NULL_RTX;
4451 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4452 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4453 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4454 return NULL_RTX;
4456 tree arg1 = CALL_EXPR_ARG (exp, 0);
4457 tree arg2 = CALL_EXPR_ARG (exp, 1);
4459 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4460 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4462 /* If we don't have POINTER_TYPE, call the function. */
4463 if (arg1_align == 0 || arg2_align == 0)
4464 return NULL_RTX;
4466 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4467 arg1 = builtin_save_expr (arg1);
4468 arg2 = builtin_save_expr (arg2);
4470 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4471 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4473 rtx result = NULL_RTX;
4474 /* Try to call cmpstrsi. */
4475 if (cmpstr_icode != CODE_FOR_nothing)
4476 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4477 MIN (arg1_align, arg2_align));
4479 /* Try to determine at least one length and call cmpstrnsi. */
4480 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4482 tree len;
4483 rtx arg3_rtx;
4485 tree len1 = c_strlen (arg1, 1);
4486 tree len2 = c_strlen (arg2, 1);
4488 if (len1)
4489 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4490 if (len2)
4491 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4493 /* If we don't have a constant length for the first, use the length
4494 of the second, if we know it. We don't require a constant for
4495 this case; some cost analysis could be done if both are available
4496 but neither is constant. For now, assume they're equally cheap,
4497 unless one has side effects. If both strings have constant lengths,
4498 use the smaller. */
4500 if (!len1)
4501 len = len2;
4502 else if (!len2)
4503 len = len1;
4504 else if (TREE_SIDE_EFFECTS (len1))
4505 len = len2;
4506 else if (TREE_SIDE_EFFECTS (len2))
4507 len = len1;
4508 else if (TREE_CODE (len1) != INTEGER_CST)
4509 len = len2;
4510 else if (TREE_CODE (len2) != INTEGER_CST)
4511 len = len1;
4512 else if (tree_int_cst_lt (len1, len2))
4513 len = len1;
4514 else
4515 len = len2;
4517 /* If both arguments have side effects, we cannot optimize. */
4518 if (len && !TREE_SIDE_EFFECTS (len))
4520 arg3_rtx = expand_normal (len);
4521 result = expand_cmpstrn_or_cmpmem
4522 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4523 arg3_rtx, MIN (arg1_align, arg2_align));
4527 tree fndecl = get_callee_fndecl (exp);
4528 if (result)
4530 /* Check to see if the argument was declared attribute nonstring
4531 and if so, issue a warning since at this point it's not known
4532 to be nul-terminated. */
4533 maybe_warn_nonstring_arg (fndecl, exp);
4535 /* Return the value in the proper mode for this function. */
4536 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4537 if (GET_MODE (result) == mode)
4538 return result;
4539 if (target == 0)
4540 return convert_to_mode (mode, result, 0);
4541 convert_move (target, result, 0);
4542 return target;
4545 /* Expand the library call ourselves using a stabilized argument
4546 list to avoid re-evaluating the function's arguments twice. */
4547 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4548 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4549 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4550 return expand_call (fn, target, target == const0_rtx);
4553 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4554 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4555 the result in TARGET, if convenient. */
4557 static rtx
4558 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4559 ATTRIBUTE_UNUSED machine_mode mode)
4561 if (!validate_arglist (exp,
4562 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4563 return NULL_RTX;
4565 /* If c_strlen can determine an expression for one of the string
4566 lengths, and it doesn't have side effects, then emit cmpstrnsi
4567 using length MIN(strlen(string)+1, arg3). */
4568 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4569 if (cmpstrn_icode == CODE_FOR_nothing)
4570 return NULL_RTX;
4572 tree len;
4574 tree arg1 = CALL_EXPR_ARG (exp, 0);
4575 tree arg2 = CALL_EXPR_ARG (exp, 1);
4576 tree arg3 = CALL_EXPR_ARG (exp, 2);
4578 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4579 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4581 tree len1 = c_strlen (arg1, 1);
4582 tree len2 = c_strlen (arg2, 1);
4584 location_t loc = EXPR_LOCATION (exp);
4586 if (len1)
4587 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4588 if (len2)
4589 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4591 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4593 /* If we don't have a constant length for the first, use the length
4594 of the second, if we know it. If neither string is constant length,
4595 use the given length argument. We don't require a constant for
4596 this case; some cost analysis could be done if both are available
4597 but neither is constant. For now, assume they're equally cheap,
4598 unless one has side effects. If both strings have constant lengths,
4599 use the smaller. */
4601 if (!len1 && !len2)
4602 len = len3;
4603 else if (!len1)
4604 len = len2;
4605 else if (!len2)
4606 len = len1;
4607 else if (TREE_SIDE_EFFECTS (len1))
4608 len = len2;
4609 else if (TREE_SIDE_EFFECTS (len2))
4610 len = len1;
4611 else if (TREE_CODE (len1) != INTEGER_CST)
4612 len = len2;
4613 else if (TREE_CODE (len2) != INTEGER_CST)
4614 len = len1;
4615 else if (tree_int_cst_lt (len1, len2))
4616 len = len1;
4617 else
4618 len = len2;
4620 /* If we are not using the given length, we must incorporate it here.
4621 The actual new length parameter will be MIN(len,arg3) in this case. */
4622 if (len != len3)
4623 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4624 rtx arg1_rtx = get_memory_rtx (arg1, len);
4625 rtx arg2_rtx = get_memory_rtx (arg2, len);
4626 rtx arg3_rtx = expand_normal (len);
4627 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4628 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4629 MIN (arg1_align, arg2_align));
4631 tree fndecl = get_callee_fndecl (exp);
4632 if (result)
4634 /* Check to see if the argument was declared attribute nonstring
4635 and if so, issue a warning since at this point it's not known
4636 to be nul-terminated. */
4637 maybe_warn_nonstring_arg (fndecl, exp);
4639 /* Return the value in the proper mode for this function. */
4640 mode = TYPE_MODE (TREE_TYPE (exp));
4641 if (GET_MODE (result) == mode)
4642 return result;
4643 if (target == 0)
4644 return convert_to_mode (mode, result, 0);
4645 convert_move (target, result, 0);
4646 return target;
4649 /* Expand the library call ourselves using a stabilized argument
4650 list to avoid re-evaluating the function's arguments twice. */
4651 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4652 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4653 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4654 return expand_call (fn, target, target == const0_rtx);
4657 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4658 if that's convenient. */
4660 rtx
4661 expand_builtin_saveregs (void)
4663 rtx val;
4664 rtx_insn *seq;
4666 /* Don't do __builtin_saveregs more than once in a function.
4667 Save the result of the first call and reuse it. */
4668 if (saveregs_value != 0)
4669 return saveregs_value;
4671 /* When this function is called, it means that registers must be
4672 saved on entry to this function. So we migrate the call to the
4673 first insn of this function. */
4675 start_sequence ();
4677 /* Do whatever the machine needs done in this case. */
4678 val = targetm.calls.expand_builtin_saveregs ();
4680 seq = get_insns ();
4681 end_sequence ();
4683 saveregs_value = val;
4685 /* Put the insns after the NOTE that starts the function. If this
4686 is inside a start_sequence, make the outer-level insn chain current, so
4687 the code is placed at the start of the function. */
4688 push_topmost_sequence ();
4689 emit_insn_after (seq, entry_of_function ());
4690 pop_topmost_sequence ();
4692 return val;
4695 /* Expand a call to __builtin_next_arg. */
4697 static rtx
4698 expand_builtin_next_arg (void)
4700 /* Checking arguments is already done in fold_builtin_next_arg
4701 which must be called before this function. */
4702 return expand_binop (ptr_mode, add_optab,
4703 crtl->args.internal_arg_pointer,
4704 crtl->args.arg_offset_rtx,
4705 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4708 /* Make it easier for the backends by protecting the valist argument
4709 from multiple evaluations. */
4711 static tree
4712 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4714 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4716 /* The current way of determining the type of valist is completely
4717 bogus. We should have the information on the va builtin instead. */
4718 if (!vatype)
4719 vatype = targetm.fn_abi_va_list (cfun->decl);
4721 if (TREE_CODE (vatype) == ARRAY_TYPE)
4723 if (TREE_SIDE_EFFECTS (valist))
4724 valist = save_expr (valist);
4726 /* For this case, the backends will be expecting a pointer to
4727 vatype, but it's possible we've actually been given an array
4728 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4729 So fix it. */
4730 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4732 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4733 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4736 else
4738 tree pt = build_pointer_type (vatype);
4740 if (! needs_lvalue)
4742 if (! TREE_SIDE_EFFECTS (valist))
4743 return valist;
4745 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4746 TREE_SIDE_EFFECTS (valist) = 1;
4749 if (TREE_SIDE_EFFECTS (valist))
4750 valist = save_expr (valist);
4751 valist = fold_build2_loc (loc, MEM_REF,
4752 vatype, valist, build_int_cst (pt, 0));
4755 return valist;
4758 /* The "standard" definition of va_list is void*. */
4760 tree
4761 std_build_builtin_va_list (void)
4763 return ptr_type_node;
4766 /* The "standard" abi va_list is va_list_type_node. */
4768 tree
4769 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4771 return va_list_type_node;
4774 /* The "standard" type of va_list is va_list_type_node. */
4776 tree
4777 std_canonical_va_list_type (tree type)
4779 tree wtype, htype;
4781 wtype = va_list_type_node;
4782 htype = type;
4784 if (TREE_CODE (wtype) == ARRAY_TYPE)
4786 /* If va_list is an array type, the argument may have decayed
4787 to a pointer type, e.g. by being passed to another function.
4788 In that case, unwrap both types so that we can compare the
4789 underlying records. */
4790 if (TREE_CODE (htype) == ARRAY_TYPE
4791 || POINTER_TYPE_P (htype))
4793 wtype = TREE_TYPE (wtype);
4794 htype = TREE_TYPE (htype);
4797 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4798 return va_list_type_node;
4800 return NULL_TREE;
4803 /* The "standard" implementation of va_start: just assign `nextarg' to
4804 the variable. */
4806 void
4807 std_expand_builtin_va_start (tree valist, rtx nextarg)
4809 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4810 convert_move (va_r, nextarg, 0);
4813 /* Expand EXP, a call to __builtin_va_start. */
4815 static rtx
4816 expand_builtin_va_start (tree exp)
4818 rtx nextarg;
4819 tree valist;
4820 location_t loc = EXPR_LOCATION (exp);
4822 if (call_expr_nargs (exp) < 2)
4824 error_at (loc, "too few arguments to function %<va_start%>");
4825 return const0_rtx;
4828 if (fold_builtin_next_arg (exp, true))
4829 return const0_rtx;
4831 nextarg = expand_builtin_next_arg ();
4832 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4834 if (targetm.expand_builtin_va_start)
4835 targetm.expand_builtin_va_start (valist, nextarg);
4836 else
4837 std_expand_builtin_va_start (valist, nextarg);
4839 return const0_rtx;
4842 /* Expand EXP, a call to __builtin_va_end. */
4844 static rtx
4845 expand_builtin_va_end (tree exp)
4847 tree valist = CALL_EXPR_ARG (exp, 0);
4849 /* Evaluate for side effects, if needed. I hate macros that don't
4850 do that. */
4851 if (TREE_SIDE_EFFECTS (valist))
4852 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4854 return const0_rtx;
4857 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4858 builtin rather than just as an assignment in stdarg.h because of the
4859 nastiness of array-type va_list types. */
4861 static rtx
4862 expand_builtin_va_copy (tree exp)
4864 tree dst, src, t;
4865 location_t loc = EXPR_LOCATION (exp);
4867 dst = CALL_EXPR_ARG (exp, 0);
4868 src = CALL_EXPR_ARG (exp, 1);
4870 dst = stabilize_va_list_loc (loc, dst, 1);
4871 src = stabilize_va_list_loc (loc, src, 0);
4873 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4875 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4877 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4878 TREE_SIDE_EFFECTS (t) = 1;
4879 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4881 else
4883 rtx dstb, srcb, size;
4885 /* Evaluate to pointers. */
4886 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4887 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4888 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4889 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4891 dstb = convert_memory_address (Pmode, dstb);
4892 srcb = convert_memory_address (Pmode, srcb);
4894 /* "Dereference" to BLKmode memories. */
4895 dstb = gen_rtx_MEM (BLKmode, dstb);
4896 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4897 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4898 srcb = gen_rtx_MEM (BLKmode, srcb);
4899 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4900 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4902 /* Copy. */
4903 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4906 return const0_rtx;
4909 /* Expand a call to one of the builtin functions __builtin_frame_address or
4910 __builtin_return_address. */
4912 static rtx
4913 expand_builtin_frame_address (tree fndecl, tree exp)
4915 /* The argument must be a nonnegative integer constant.
4916 It counts the number of frames to scan up the stack.
4917 The value is either the frame pointer value or the return
4918 address saved in that frame. */
4919 if (call_expr_nargs (exp) == 0)
4920 /* Warning about missing arg was already issued. */
4921 return const0_rtx;
4922 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4924 error ("invalid argument to %qD", fndecl);
4925 return const0_rtx;
4927 else
4929 /* Number of frames to scan up the stack. */
4930 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4932 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4934 /* Some ports cannot access arbitrary stack frames. */
4935 if (tem == NULL)
4937 warning (0, "unsupported argument to %qD", fndecl);
4938 return const0_rtx;
4941 if (count)
4943 /* Warn since no effort is made to ensure that any frame
4944 beyond the current one exists or can be safely reached. */
4945 warning (OPT_Wframe_address, "calling %qD with "
4946 "a nonzero argument is unsafe", fndecl);
4949 /* For __builtin_frame_address, return what we've got. */
4950 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4951 return tem;
4953 if (!REG_P (tem)
4954 && ! CONSTANT_P (tem))
4955 tem = copy_addr_to_reg (tem);
4956 return tem;
4960 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4961 failed and the caller should emit a normal call. */
4963 static rtx
4964 expand_builtin_alloca (tree exp)
4966 rtx op0;
4967 rtx result;
4968 unsigned int align;
4969 tree fndecl = get_callee_fndecl (exp);
4970 HOST_WIDE_INT max_size;
4971 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4972 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4973 bool valid_arglist
4974 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4975 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4976 VOID_TYPE)
4977 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4978 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4979 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4981 if (!valid_arglist)
4982 return NULL_RTX;
4984 if ((alloca_for_var && !warn_vla_limit)
4985 || (!alloca_for_var && !warn_alloca_limit))
4987 /* -Walloca-larger-than and -Wvla-larger-than settings override
4988 the more general -Walloc-size-larger-than so unless either of
4989 the former options is specified check the alloca arguments for
4990 overflow. */
4991 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4992 int idx[] = { 0, -1 };
4993 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4996 /* Compute the argument. */
4997 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4999 /* Compute the alignment. */
5000 align = (fcode == BUILT_IN_ALLOCA
5001 ? BIGGEST_ALIGNMENT
5002 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5004 /* Compute the maximum size. */
5005 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5006 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5007 : -1);
5009 /* Allocate the desired space. If the allocation stems from the declaration
5010 of a variable-sized object, it cannot accumulate. */
5011 result
5012 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5013 result = convert_memory_address (ptr_mode, result);
5015 return result;
5018 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5019 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5020 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5021 in the comment for the handle_builtin_stack_restore function. */
5023 static rtx
5024 expand_asan_emit_allocas_unpoison (tree exp)
5026 tree arg0 = CALL_EXPR_ARG (exp, 0);
5027 tree arg1 = CALL_EXPR_ARG (exp, 1);
5028 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5029 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5030 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5031 stack_pointer_rtx, NULL_RTX, 0,
5032 OPTAB_LIB_WIDEN);
5033 off = convert_modes (ptr_mode, Pmode, off, 0);
5034 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5035 OPTAB_LIB_WIDEN);
5036 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5037 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5038 top, ptr_mode, bot, ptr_mode);
5039 return ret;
5042 /* Expand a call to bswap builtin in EXP.
5043 Return NULL_RTX if a normal call should be emitted rather than expanding the
5044 function in-line. If convenient, the result should be placed in TARGET.
5045 SUBTARGET may be used as the target for computing one of EXP's operands. */
5047 static rtx
5048 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5049 rtx subtarget)
5051 tree arg;
5052 rtx op0;
5054 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5055 return NULL_RTX;
5057 arg = CALL_EXPR_ARG (exp, 0);
5058 op0 = expand_expr (arg,
5059 subtarget && GET_MODE (subtarget) == target_mode
5060 ? subtarget : NULL_RTX,
5061 target_mode, EXPAND_NORMAL);
5062 if (GET_MODE (op0) != target_mode)
5063 op0 = convert_to_mode (target_mode, op0, 1);
5065 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5067 gcc_assert (target);
5069 return convert_to_mode (target_mode, target, 1);
5072 /* Expand a call to a unary builtin in EXP.
5073 Return NULL_RTX if a normal call should be emitted rather than expanding the
5074 function in-line. If convenient, the result should be placed in TARGET.
5075 SUBTARGET may be used as the target for computing one of EXP's operands. */
5077 static rtx
5078 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5079 rtx subtarget, optab op_optab)
5081 rtx op0;
5083 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5084 return NULL_RTX;
5086 /* Compute the argument. */
5087 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5088 (subtarget
5089 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5090 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5091 VOIDmode, EXPAND_NORMAL);
5092 /* Compute op, into TARGET if possible.
5093 Set TARGET to wherever the result comes back. */
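/* CLRSB is the only unop here whose operand is inherently signed,
   so pass UNSIGNEDP accordingly. */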
5094 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5095 op_optab, op0, target, op_optab != clrsb_optab);
5096 gcc_assert (target);
5098 return convert_to_mode (target_mode, target, 0);
5101 /* Expand a call to __builtin_expect. We just return our argument
5102 as the builtin_expect semantics should have already been handled
5103 by the tree branch prediction pass. */
5105 static rtx
5106 expand_builtin_expect (tree exp, rtx target)
5108 tree arg;
5110 if (call_expr_nargs (exp) < 2)
5111 return const0_rtx;
5112 arg = CALL_EXPR_ARG (exp, 0);
5114 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5115 /* When guessing was done, the hints should be already stripped away. */
5116 gcc_assert (!flag_guess_branch_prob
5117 || optimize == 0 || seen_error ());
5118 return target;
5121 /* Expand a call to __builtin_assume_aligned. We just return our first
5122 argument as the builtin_assume_aligned semantics should have already
5123 been handled by CCP. */
5125 static rtx
5126 expand_builtin_assume_aligned (tree exp, rtx target)
5128 if (call_expr_nargs (exp) < 2)
5129 return const0_rtx;
5130 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5131 EXPAND_NORMAL);
5132 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5133 && (call_expr_nargs (exp) < 3
5134 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5135 return target;
5138 void
5139 expand_builtin_trap (void)
5141 if (targetm.have_trap ())
5143 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5144 /* For trap insns when not accumulating outgoing args force
5145 REG_ARGS_SIZE note to prevent crossjumping of calls with
5146 different args sizes. */
5147 if (!ACCUMULATE_OUTGOING_ARGS)
5148 add_args_size_note (insn, stack_pointer_delta);
5150 else
5152 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5153 tree call_expr = build_call_expr (fn, 0);
5154 expand_call (call_expr, NULL_RTX, false);
5157 emit_barrier ();
5160 /* Expand a call to __builtin_unreachable. We do nothing except emit
5161 a barrier saying that control flow will not pass here.
5163 It is the responsibility of the program being compiled to ensure
5164 that control flow never reaches __builtin_unreachable. */
5165 static void
5166 expand_builtin_unreachable (void)
5168 emit_barrier ();
5171 /* Expand EXP, a call to fabs, fabsf or fabsl.
5172 Return NULL_RTX if a normal call should be emitted rather than expanding
5173 the function inline. If convenient, the result should be placed
5174 in TARGET. SUBTARGET may be used as the target for computing
5175 the operand. */
5177 static rtx
5178 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5180 machine_mode mode;
5181 tree arg;
5182 rtx op0;
5184 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5185 return NULL_RTX;
5187 arg = CALL_EXPR_ARG (exp, 0);
5188 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5189 mode = TYPE_MODE (TREE_TYPE (arg));
5190 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5191 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5194 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5195 Return NULL if a normal call should be emitted rather than expanding the
5196 function inline. If convenient, the result should be placed in TARGET.
5197 SUBTARGET may be used as the target for computing the operand. */
5199 static rtx
5200 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5202 rtx op0, op1;
5203 tree arg;
5205 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5206 return NULL_RTX;
5208 arg = CALL_EXPR_ARG (exp, 0);
5209 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5211 arg = CALL_EXPR_ARG (exp, 1);
5212 op1 = expand_normal (arg);
5214 return expand_copysign (op0, op1, target);
5217 /* Expand a call to __builtin___clear_cache. */
5219 static rtx
5220 expand_builtin___clear_cache (tree exp)
5222 if (!targetm.code_for_clear_cache)
5224 #ifdef CLEAR_INSN_CACHE
5225 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5226 does something. Just do the default expansion to a call to
5227 __clear_cache(). */
5228 return NULL_RTX;
5229 #else
5230 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5231 does nothing. There is no need to call it. Do nothing. */
5232 return const0_rtx;
5233 #endif /* CLEAR_INSN_CACHE */
5236 /* We have a "clear_cache" insn, and it will handle everything. */
5237 tree begin, end;
5238 rtx begin_rtx, end_rtx;
5240 /* We must not expand to a library call. If we did, any
5241 fallback library function in libgcc that might contain a call to
5242 __builtin___clear_cache() would recurse infinitely. */
5243 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5245 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5246 return const0_rtx;
5249 if (targetm.have_clear_cache ())
5251 struct expand_operand ops[2];
5253 begin = CALL_EXPR_ARG (exp, 0);
5254 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5256 end = CALL_EXPR_ARG (exp, 1);
5257 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5259 create_address_operand (&ops[0], begin_rtx);
5260 create_address_operand (&ops[1], end_rtx);
5261 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5262 return const0_rtx;
5264 return const0_rtx;
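/* Illustrative use (hypothetical JIT-style user code): after copying
   instructions into a buffer, flush the instruction cache over that
   range before jumping to it:

     memcpy (buf, code, len);
     __builtin___clear_cache (buf, (char *) buf + len);
     ((void (*) (void)) buf) ();

   The two pointer arguments bound the region to be invalidated.  */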
5267 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5269 static rtx
5270 round_trampoline_addr (rtx tramp)
5272 rtx temp, addend, mask;
5274 /* If we don't need too much alignment, we'll have been guaranteed
5275 proper alignment by get_trampoline_type. */
5276 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5277 return tramp;
5279 /* Round address up to desired boundary. */
5280 temp = gen_reg_rtx (Pmode);
5281 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5282 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5284 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5285 temp, 0, OPTAB_LIB_WIDEN);
5286 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5287 temp, 0, OPTAB_LIB_WIDEN);
5289 return tramp;
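/* Worked example of the arithmetic above: with a TRAMPOLINE_ALIGNMENT
   of 64 bits, ADDEND is 7 and MASK is -8, so an address such as 0x1003
   becomes (0x1003 + 7) & -8 = 0x1008, the next 8-byte boundary, while
   an already-aligned 0x1008 is left unchanged.  */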
5292 static rtx
5293 expand_builtin_init_trampoline (tree exp, bool onstack)
5295 tree t_tramp, t_func, t_chain;
5296 rtx m_tramp, r_tramp, r_chain, tmp;
5298 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5299 POINTER_TYPE, VOID_TYPE))
5300 return NULL_RTX;
5302 t_tramp = CALL_EXPR_ARG (exp, 0);
5303 t_func = CALL_EXPR_ARG (exp, 1);
5304 t_chain = CALL_EXPR_ARG (exp, 2);
5306 r_tramp = expand_normal (t_tramp);
5307 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5308 MEM_NOTRAP_P (m_tramp) = 1;
5310 /* If ONSTACK, the TRAMP argument should be the address of a field
5311 within the local function's FRAME decl. Either way, let's see if
5312 we can fill in the MEM_ATTRs for this memory. */
5313 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5314 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5316 /* The creator of a heap trampoline is responsible for making sure the
5317 address is aligned to at least STACK_BOUNDARY. Normally malloc
5318 will ensure this anyhow. */
5319 tmp = round_trampoline_addr (r_tramp);
5320 if (tmp != r_tramp)
5322 m_tramp = change_address (m_tramp, BLKmode, tmp);
5323 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5324 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5327 /* The FUNC argument should be the address of the nested function.
5328 Extract the actual function decl to pass to the hook. */
5329 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5330 t_func = TREE_OPERAND (t_func, 0);
5331 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5333 r_chain = expand_normal (t_chain);
5335 /* Generate insns to initialize the trampoline. */
5336 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5338 if (onstack)
5340 trampolines_created = 1;
5342 if (targetm.calls.custom_function_descriptors != 0)
5343 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5344 "trampoline generated for nested function %qD", t_func);
5347 return const0_rtx;
5350 static rtx
5351 expand_builtin_adjust_trampoline (tree exp)
5353 rtx tramp;
5355 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5356 return NULL_RTX;
5358 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5359 tramp = round_trampoline_addr (tramp);
5360 if (targetm.calls.trampoline_adjust_address)
5361 tramp = targetm.calls.trampoline_adjust_address (tramp);
5363 return tramp;
5366 /* Expand a call to the builtin descriptor initialization routine.
5367 A descriptor is made up of a couple of pointers to the static
5368 chain and the code entry in this order. */
5370 static rtx
5371 expand_builtin_init_descriptor (tree exp)
5373 tree t_descr, t_func, t_chain;
5374 rtx m_descr, r_descr, r_func, r_chain;
5376 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5377 VOID_TYPE))
5378 return NULL_RTX;
5380 t_descr = CALL_EXPR_ARG (exp, 0);
5381 t_func = CALL_EXPR_ARG (exp, 1);
5382 t_chain = CALL_EXPR_ARG (exp, 2);
5384 r_descr = expand_normal (t_descr);
5385 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5386 MEM_NOTRAP_P (m_descr) = 1;
5388 r_func = expand_normal (t_func);
5389 r_chain = expand_normal (t_chain);
5391 /* Generate insns to initialize the descriptor. */
5392 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5393 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5394 POINTER_SIZE / BITS_PER_UNIT), r_func);
5396 return const0_rtx;
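/* The descriptor initialized above therefore occupies two consecutive
   pointer-sized slots:

     offset 0:                            static chain value
     offset POINTER_SIZE / BITS_PER_UNIT: code entry point

   matching the two emit_move_insn calls.  */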
5399 /* Expand a call to the builtin descriptor adjustment routine. */
5401 static rtx
5402 expand_builtin_adjust_descriptor (tree exp)
5404 rtx tramp;
5406 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5407 return NULL_RTX;
5409 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5411 /* Unalign the descriptor to allow runtime identification. */
5412 tramp = plus_constant (ptr_mode, tramp,
5413 targetm.calls.custom_function_descriptors);
5415 return force_operand (tramp, NULL_RTX);
5418 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5419 function. The function first checks whether the back end provides
5420 an insn to implement signbit for the respective mode. If not, it
5421 checks whether the floating point format of the value is such that
5422 the sign bit can be extracted. If that is not the case, expand as "ARG < 0.0".
5423 EXP is the expression that is a call to the builtin function; if
5424 convenient, the result should be placed in TARGET. */
5425 static rtx
5426 expand_builtin_signbit (tree exp, rtx target)
5428 const struct real_format *fmt;
5429 scalar_float_mode fmode;
5430 scalar_int_mode rmode, imode;
5431 tree arg;
5432 int word, bitpos;
5433 enum insn_code icode;
5434 rtx temp;
5435 location_t loc = EXPR_LOCATION (exp);
5437 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5438 return NULL_RTX;
5440 arg = CALL_EXPR_ARG (exp, 0);
5441 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5442 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5443 fmt = REAL_MODE_FORMAT (fmode);
5445 arg = builtin_save_expr (arg);
5447 /* Expand the argument, yielding an RTX expression. */
5448 temp = expand_normal (arg);
5450 /* Check if the back end provides an insn that handles signbit for the
5451 argument's mode. */
5452 icode = optab_handler (signbit_optab, fmode);
5453 if (icode != CODE_FOR_nothing)
5455 rtx_insn *last = get_last_insn ();
5456 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5457 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5458 return target;
5459 delete_insns_since (last);
5462 /* For floating point formats without a sign bit, implement signbit
5463 as "ARG < 0.0". */
5464 bitpos = fmt->signbit_ro;
5465 if (bitpos < 0)
5467 /* But we can't do this if the format supports signed zero. */
5468 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5470 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5471 build_real (TREE_TYPE (arg), dconst0));
5472 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5475 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5477 imode = int_mode_for_mode (fmode).require ();
5478 temp = gen_lowpart (imode, temp);
5480 else
5482 imode = word_mode;
5483 /* Handle targets with different FP word orders. */
5484 if (FLOAT_WORDS_BIG_ENDIAN)
5485 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5486 else
5487 word = bitpos / BITS_PER_WORD;
5488 temp = operand_subword_force (temp, word, fmode);
5489 bitpos = bitpos % BITS_PER_WORD;
5492 /* Force the intermediate word_mode (or narrower) result into a
5493 register. This avoids attempting to create paradoxical SUBREGs
5494 of floating point modes below. */
5495 temp = force_reg (imode, temp);
5497 /* If the bitpos is within the "result mode" lowpart, the operation
5498 can be implemented with a single bitwise AND. Otherwise, we need
5499 a right shift and an AND. */
5501 if (bitpos < GET_MODE_BITSIZE (rmode))
5503 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5505 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5506 temp = gen_lowpart (rmode, temp);
5507 temp = expand_binop (rmode, and_optab, temp,
5508 immed_wide_int_const (mask, rmode),
5509 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5511 else
5513 /* Perform a logical right shift to place the signbit in the least
5514 significant bit, then truncate the result to the desired mode
5515 and mask just this bit. */
5516 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5517 temp = gen_lowpart (rmode, temp);
5518 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5519 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5522 return temp;
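/* Illustrative semantics (hypothetical user code): signbit inspects the
   raw sign bit, so unlike a comparison it distinguishes negative zero:

     __builtin_signbit (-0.0)   -- nonzero
     (-0.0 < 0.0)               -- false

   which is why the "ARG < 0.0" fallback above is asserted to be used
   only when the format has no signed zeros.  */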
5525 /* Expand fork or exec calls. TARGET is the desired target of the
5526 call. EXP is the call. FN is the
5527 declaration of the actual function. IGNORE is nonzero if the
5528 value is to be ignored. */
5530 static rtx
5531 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5533 tree id, decl;
5534 tree call;
5536 /* If we are not profiling, just call the function. */
5537 if (!profile_arc_flag)
5538 return NULL_RTX;
5540 /* Otherwise call the wrapper. This should be equivalent for the rest of
5541 the compiler, so the code does not diverge, and the wrapper may run the
5542 code necessary for keeping the profiling sane. */
5544 switch (DECL_FUNCTION_CODE (fn))
5546 case BUILT_IN_FORK:
5547 id = get_identifier ("__gcov_fork");
5548 break;
5550 case BUILT_IN_EXECL:
5551 id = get_identifier ("__gcov_execl");
5552 break;
5554 case BUILT_IN_EXECV:
5555 id = get_identifier ("__gcov_execv");
5556 break;
5558 case BUILT_IN_EXECLP:
5559 id = get_identifier ("__gcov_execlp");
5560 break;
5562 case BUILT_IN_EXECLE:
5563 id = get_identifier ("__gcov_execle");
5564 break;
5566 case BUILT_IN_EXECVP:
5567 id = get_identifier ("__gcov_execvp");
5568 break;
5570 case BUILT_IN_EXECVE:
5571 id = get_identifier ("__gcov_execve");
5572 break;
5574 default:
5575 gcc_unreachable ();
5578 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5579 FUNCTION_DECL, id, TREE_TYPE (fn));
5580 DECL_EXTERNAL (decl) = 1;
5581 TREE_PUBLIC (decl) = 1;
5582 DECL_ARTIFICIAL (decl) = 1;
5583 TREE_NOTHROW (decl) = 1;
5584 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5585 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5586 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5587 return expand_call (call, target, ignore);
5592 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5593 the pointer in these functions is void*, the tree optimizers may remove
5594 casts. The mode computed in expand_builtin isn't reliable either, due
5595 to __sync_bool_compare_and_swap.
5597 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5598 group of builtins. This gives us log2 of the mode size. */
5600 static inline machine_mode
5601 get_builtin_sync_mode (int fcode_diff)
5603 /* The size is not negotiable, so ask not to get BLKmode in return
5604 if the target indicates that a smaller size would be better. */
5605 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
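/* For example, __sync_fetch_and_add_4 differs from the _1 variant by 2,
   giving BITS_PER_UNIT << 2 = 32 bits; a difference of 0 selects an
   8-bit mode and 3 a 64-bit mode.  */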
5608 /* Expand the memory expression LOC and return the appropriate memory operand
5609 for the builtin_sync operations. */
5611 static rtx
5612 get_builtin_sync_mem (tree loc, machine_mode mode)
5614 rtx addr, mem;
5616 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5617 addr = convert_memory_address (Pmode, addr);
5619 /* Note that we explicitly do not want any alias information for this
5620 memory, so that we kill all other live memories. Otherwise we don't
5621 satisfy the full barrier semantics of the intrinsic. */
5622 mem = validize_mem (gen_rtx_MEM (mode, addr));
5624 /* The alignment needs to be at least that of the mode. */
5625 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5626 get_pointer_alignment (loc)));
5627 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5628 MEM_VOLATILE_P (mem) = 1;
5630 return mem;
5633 /* Make sure an argument is in the right mode.
5634 EXP is the tree argument.
5635 MODE is the mode it should be in. */
5637 static rtx
5638 expand_expr_force_mode (tree exp, machine_mode mode)
5640 rtx val;
5641 machine_mode old_mode;
5643 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5644 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5645 of CONST_INTs, where we know the old_mode only from the call argument. */
5647 old_mode = GET_MODE (val);
5648 if (old_mode == VOIDmode)
5649 old_mode = TYPE_MODE (TREE_TYPE (exp));
5650 val = convert_modes (mode, old_mode, val, 1);
5651 return val;
5655 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5656 EXP is the CALL_EXPR. CODE is the rtx code
5657 that corresponds to the arithmetic or logical operation from the name;
5658 an exception here is that NOT actually means NAND. TARGET is an optional
5659 place for us to store the results; AFTER is true if this is the
5660 fetch_and_xxx form. */
5662 static rtx
5663 expand_builtin_sync_operation (machine_mode mode, tree exp,
5664 enum rtx_code code, bool after,
5665 rtx target)
5667 rtx val, mem;
5668 location_t loc = EXPR_LOCATION (exp);
5670 if (code == NOT && warn_sync_nand)
5672 tree fndecl = get_callee_fndecl (exp);
5673 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5675 static bool warned_f_a_n, warned_n_a_f;
5677 switch (fcode)
5679 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5680 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5681 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5682 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5683 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5684 if (warned_f_a_n)
5685 break;
5687 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5688 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5689 warned_f_a_n = true;
5690 break;
5692 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5693 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5694 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5695 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5696 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5697 if (warned_n_a_f)
5698 break;
5700 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5701 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5702 warned_n_a_f = true;
5703 break;
5705 default:
5706 gcc_unreachable ();
5710 /* Expand the operands. */
5711 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5712 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5714 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5715 after);
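/* Illustrative mapping (hypothetical user code): with CODE == PLUS and
   AFTER == false this expands, e.g.,

     old = __sync_fetch_and_add (&counter, 1);

   and the NAND forms use the GCC 4.4 semantics warned about above,
   i.e. __sync_nand_and_fetch (&x, v) computes ~(x & v).  */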
5718 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5719 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5720 true if this is the boolean form. TARGET is a place for us to store the
5721 results; this is NOT optional if IS_BOOL is true. */
5723 static rtx
5724 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5725 bool is_bool, rtx target)
5727 rtx old_val, new_val, mem;
5728 rtx *pbool, *poval;
5730 /* Expand the operands. */
5731 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5732 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5733 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5735 pbool = poval = NULL;
5736 if (target != const0_rtx)
5738 if (is_bool)
5739 pbool = &target;
5740 else
5741 poval = &target;
5743 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5744 false, MEMMODEL_SYNC_SEQ_CST,
5745 MEMMODEL_SYNC_SEQ_CST))
5746 return NULL_RTX;
5748 return target;
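/* Illustrative uses (hypothetical user code):

     old = __sync_val_compare_and_swap (&mem, expected, desired);
     ok  = __sync_bool_compare_and_swap (&mem, expected, desired);

   The val form returns the prior contents of MEM; the bool form
   returns whether the swap happened, which is why TARGET is not
   optional when IS_BOOL is true.  */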
5751 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5752 general form is actually an atomic exchange, and some targets only
5753 support a reduced form with the second argument being a constant 1.
5754 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5755 the results. */
5757 static rtx
5758 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5759 rtx target)
5761 rtx val, mem;
5763 /* Expand the operands. */
5764 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5765 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5767 return expand_sync_lock_test_and_set (target, mem, val);
5770 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5772 static void
5773 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5775 rtx mem;
5777 /* Expand the operands. */
5778 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5780 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
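/* Illustrative pairing (hypothetical user code): the two lock builtins
   form an acquire/release pair, e.g. a minimal spinlock:

     while (__sync_lock_test_and_set (&lock, 1))
       ;                            -- spin while the old value was 1
     ... critical section ...
     __sync_lock_release (&lock);   -- store 0 with release semantics  */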
5783 /* Given an integer representing an ``enum memmodel'', verify its
5784 correctness and return the memory model enum. */
5786 static enum memmodel
5787 get_memmodel (tree exp)
5789 rtx op;
5790 unsigned HOST_WIDE_INT val;
5791 source_location loc
5792 = expansion_point_location_if_in_system_header (input_location);
5794 /* If the parameter is not a constant, it's a run time value so we'll just
5795 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5796 if (TREE_CODE (exp) != INTEGER_CST)
5797 return MEMMODEL_SEQ_CST;
5799 op = expand_normal (exp);
5801 val = INTVAL (op);
5802 if (targetm.memmodel_check)
5803 val = targetm.memmodel_check (val);
5804 else if (val & ~MEMMODEL_MASK)
5806 warning_at (loc, OPT_Winvalid_memory_model,
5807 "unknown architecture specifier in memory model to builtin");
5808 return MEMMODEL_SEQ_CST;
5811 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5812 if (memmodel_base (val) >= MEMMODEL_LAST)
5814 warning_at (loc, OPT_Winvalid_memory_model,
5815 "invalid memory model argument to builtin");
5816 return MEMMODEL_SEQ_CST;
5819 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5820 be conservative and promote consume to acquire. */
5821 if (val == MEMMODEL_CONSUME)
5822 val = MEMMODEL_ACQUIRE;
5824 return (enum memmodel) val;
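/* For example, a call such as

     __atomic_load_n (p, __ATOMIC_CONSUME);

   arrives here with an INTEGER_CST model and, per the workaround above,
   is treated as __ATOMIC_ACQUIRE, while a non-constant model argument
   is conservatively treated as __ATOMIC_SEQ_CST.  */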
5827 /* Expand the __atomic_exchange intrinsic:
5828 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5829 EXP is the CALL_EXPR.
5830 TARGET is an optional place for us to store the results. */
5832 static rtx
5833 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5835 rtx val, mem;
5836 enum memmodel model;
5838 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5840 if (!flag_inline_atomics)
5841 return NULL_RTX;
5843 /* Expand the operands. */
5844 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5845 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5847 return expand_atomic_exchange (target, mem, val, model);
5850 /* Expand the __atomic_compare_exchange intrinsic:
5851 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5852 TYPE desired, BOOL weak,
5853 enum memmodel success,
5854 enum memmodel failure)
5855 EXP is the CALL_EXPR.
5856 TARGET is an optional place for us to store the results. */
5858 static rtx
5859 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5860 rtx target)
5862 rtx expect, desired, mem, oldval;
5863 rtx_code_label *label;
5864 enum memmodel success, failure;
5865 tree weak;
5866 bool is_weak;
5867 source_location loc
5868 = expansion_point_location_if_in_system_header (input_location);
5870 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5871 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5873 if (failure > success)
5875 warning_at (loc, OPT_Winvalid_memory_model,
5876 "failure memory model cannot be stronger than success "
5877 "memory model for %<__atomic_compare_exchange%>");
5878 success = MEMMODEL_SEQ_CST;
5881 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5883 warning_at (loc, OPT_Winvalid_memory_model,
5884 "invalid failure memory model for "
5885 "%<__atomic_compare_exchange%>");
5886 failure = MEMMODEL_SEQ_CST;
5887 success = MEMMODEL_SEQ_CST;
5891 if (!flag_inline_atomics)
5892 return NULL_RTX;
5894 /* Expand the operands. */
5895 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5897 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5898 expect = convert_memory_address (Pmode, expect);
5899 expect = gen_rtx_MEM (mode, expect);
5900 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5902 weak = CALL_EXPR_ARG (exp, 3);
5903 is_weak = false;
5904 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5905 is_weak = true;
5907 if (target == const0_rtx)
5908 target = NULL;
5910 /* Lest the rtl backend create a race condition with an improper store
5911 to memory, always create a new pseudo for OLDVAL. */
5912 oldval = NULL;
5914 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5915 is_weak, success, failure))
5916 return NULL_RTX;
5918 /* Conditionally store back to EXPECT, lest we create a race condition
5919 with an improper store to memory. */
5920 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5921 the normal case where EXPECT is totally private, i.e. a register. At
5922 which point the store can be unconditional. */
5923 label = gen_label_rtx ();
5924 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5925 GET_MODE (target), 1, label);
5926 emit_move_insn (expect, oldval);
5927 emit_label (label);
5929 return target;
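/* Illustrative source-level pattern served by this expander
   (hypothetical user code):

     expected = __atomic_load_n (&x, __ATOMIC_RELAXED);
     while (!__atomic_compare_exchange_n (&x, &expected, expected + 1,
                                          true,        -- weak
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;   -- EXPECTED now holds the value observed on failure

   The conditional store back to EXPECT above implements exactly that
   refresh-on-failure contract.  */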
5932 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5933 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5934 call. The weak parameter must be dropped to match the expected parameter
5935 list and the expected argument changed from value to pointer to memory
5936 slot. */
5938 static void
5939 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5941 unsigned int z;
5942 vec<tree, va_gc> *vec;
5944 vec_alloc (vec, 5);
5945 vec->quick_push (gimple_call_arg (call, 0));
5946 tree expected = gimple_call_arg (call, 1);
5947 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5948 TREE_TYPE (expected));
5949 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5950 if (expd != x)
5951 emit_move_insn (x, expd);
5952 tree v = make_tree (TREE_TYPE (expected), x);
5953 vec->quick_push (build1 (ADDR_EXPR,
5954 build_pointer_type (TREE_TYPE (expected)), v));
5955 vec->quick_push (gimple_call_arg (call, 2));
5956 /* Skip the boolean weak parameter. */
5957 for (z = 4; z < 6; z++)
5958 vec->quick_push (gimple_call_arg (call, z));
5959 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
5960 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5961 gcc_assert (bytes_log2 < 5);
5962 built_in_function fncode
5963 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5964 + bytes_log2);
5965 tree fndecl = builtin_decl_explicit (fncode);
5966 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5967 fndecl);
5968 tree exp = build_call_vec (boolean_type_node, fn, vec);
5969 tree lhs = gimple_call_lhs (call);
5970 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5971 if (lhs)
5973 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5974 if (GET_MODE (boolret) != mode)
5975 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5976 x = force_reg (mode, x);
5977 write_complex_part (target, boolret, true);
5978 write_complex_part (target, x, false);
5982 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5984 void
5985 expand_ifn_atomic_compare_exchange (gcall *call)
5987 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5988 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5989 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5990 rtx expect, desired, mem, oldval, boolret;
5991 enum memmodel success, failure;
5992 tree lhs;
5993 bool is_weak;
5994 source_location loc
5995 = expansion_point_location_if_in_system_header (gimple_location (call));
5997 success = get_memmodel (gimple_call_arg (call, 4));
5998 failure = get_memmodel (gimple_call_arg (call, 5));
6000 if (failure > success)
6002 warning_at (loc, OPT_Winvalid_memory_model,
6003 "failure memory model cannot be stronger than success "
6004 "memory model for %<__atomic_compare_exchange%>");
6005 success = MEMMODEL_SEQ_CST;
6008 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6010 warning_at (loc, OPT_Winvalid_memory_model,
6011 "invalid failure memory model for "
6012 "%<__atomic_compare_exchange%>");
6013 failure = MEMMODEL_SEQ_CST;
6014 success = MEMMODEL_SEQ_CST;
6017 if (!flag_inline_atomics)
6019 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6020 return;
6023 /* Expand the operands. */
6024 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6026 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6027 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6029 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6031 boolret = NULL;
6032 oldval = NULL;
6034 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6035 is_weak, success, failure))
6037 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6038 return;
6041 lhs = gimple_call_lhs (call);
6042 if (lhs)
6044 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6045 if (GET_MODE (boolret) != mode)
6046 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6047 write_complex_part (target, boolret, true);
6048 write_complex_part (target, oldval, false);
6052 /* Expand the __atomic_load intrinsic:
6053 TYPE __atomic_load (TYPE *object, enum memmodel)
6054 EXP is the CALL_EXPR.
6055 TARGET is an optional place for us to store the results. */
6057 static rtx
6058 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6060 rtx mem;
6061 enum memmodel model;
6063 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6064 if (is_mm_release (model) || is_mm_acq_rel (model))
6066 source_location loc
6067 = expansion_point_location_if_in_system_header (input_location);
6068 warning_at (loc, OPT_Winvalid_memory_model,
6069 "invalid memory model for %<__atomic_load%>");
6070 model = MEMMODEL_SEQ_CST;
6073 if (!flag_inline_atomics)
6074 return NULL_RTX;
6076 /* Expand the operand. */
6077 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6079 return expand_atomic_load (target, mem, model);
6083 /* Expand the __atomic_store intrinsic:
6084 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6085 EXP is the CALL_EXPR.
6086 TARGET is an optional place for us to store the results. */
6088 static rtx
6089 expand_builtin_atomic_store (machine_mode mode, tree exp)
6091 rtx mem, val;
6092 enum memmodel model;
6094 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6095 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6096 || is_mm_release (model)))
6098 source_location loc
6099 = expansion_point_location_if_in_system_header (input_location);
6100 warning_at (loc, OPT_Winvalid_memory_model,
6101 "invalid memory model for %<__atomic_store%>");
6102 model = MEMMODEL_SEQ_CST;
6105 if (!flag_inline_atomics)
6106 return NULL_RTX;
6108 /* Expand the operands. */
6109 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6110 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6112 return expand_atomic_store (mem, val, model, false);
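/* Illustrative uses (hypothetical user code) matching the model checks
   in the two expanders above:

     v = __atomic_load_n (&flag, __ATOMIC_ACQUIRE);    -- accepted
     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);    -- accepted
     v = __atomic_load_n (&flag, __ATOMIC_RELEASE);    -- warns; model
                                                          forced to SEQ_CST

   Loads reject release/acq_rel models; stores accept only relaxed,
   release, and seq_cst.  */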
6115 /* Expand the __atomic_fetch_XXX intrinsic:
6116 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6117 EXP is the CALL_EXPR.
6118 TARGET is an optional place for us to store the results.
6119 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for NAND).
6120 FETCH_AFTER is true if the result of the operation is returned,
6121 and false if the value before the operation is returned.
6122 IGNORE is true if the result is not used.
6123 EXT_CALL is the correct builtin for an external call if this cannot be
6124 resolved to an instruction sequence. */
6126 static rtx
6127 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6128 enum rtx_code code, bool fetch_after,
6129 bool ignore, enum built_in_function ext_call)
6131 rtx val, mem, ret;
6132 enum memmodel model;
6133 tree fndecl;
6134 tree addr;
6136 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6138 /* Expand the operands. */
6139 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6140 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6142 /* Only try generating instructions if inlining is turned on. */
6143 if (flag_inline_atomics)
6145 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6146 if (ret)
6147 return ret;
6150 /* Return if a different routine isn't needed for the library call. */
6151 if (ext_call == BUILT_IN_NONE)
6152 return NULL_RTX;
6154 /* Change the call to the specified function. */
6155 fndecl = get_callee_fndecl (exp);
6156 addr = CALL_EXPR_FN (exp);
6157 STRIP_NOPS (addr);
6159 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6160 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6162 /* If we will emit code after the call, the call cannot be a tail call.
6163 If it is emitted as a tail call, a barrier is emitted after it, and
6164 then all trailing code is removed. */
6165 if (!ignore)
6166 CALL_EXPR_TAILCALL (exp) = 0;
6168 /* Expand the call here so we can emit trailing code. */
6169 ret = expand_call (exp, target, ignore);
6171 /* Replace the original function just in case it matters. */
6172 TREE_OPERAND (addr, 0) = fndecl;
6174 /* Then issue the arithmetic correction to return the right result. */
6175 if (!ignore)
6177 if (code == NOT)
6179 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6180 OPTAB_LIB_WIDEN);
6181 ret = expand_simple_unop (mode, NOT, ret, target, true);
6183 else
6184 ret = expand_simple_binop (mode, code, ret, val, target, true,
6185 OPTAB_LIB_WIDEN);
6187 return ret;
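/* Example of the correction above (hypothetical user code): if

     n = __atomic_add_fetch (&x, v, model);    -- FETCH_AFTER == true

   cannot be expanded inline and only the fetch-before library entry
   point is available, the call returns the old value and the code
   above recomputes "ret = ret + v"; for NAND it recomputes
   ~(ret & v).  */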
6190 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6192 void
6193 expand_ifn_atomic_bit_test_and (gcall *call)
6195 tree ptr = gimple_call_arg (call, 0);
6196 tree bit = gimple_call_arg (call, 1);
6197 tree flag = gimple_call_arg (call, 2);
6198 tree lhs = gimple_call_lhs (call);
6199 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6200 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6201 enum rtx_code code;
6202 optab optab;
6203 struct expand_operand ops[5];
6205 gcc_assert (flag_inline_atomics);
6207 if (gimple_call_num_args (call) == 4)
6208 model = get_memmodel (gimple_call_arg (call, 3));
6210 rtx mem = get_builtin_sync_mem (ptr, mode);
6211 rtx val = expand_expr_force_mode (bit, mode);
6213 switch (gimple_call_internal_fn (call))
6215 case IFN_ATOMIC_BIT_TEST_AND_SET:
6216 code = IOR;
6217 optab = atomic_bit_test_and_set_optab;
6218 break;
6219 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6220 code = XOR;
6221 optab = atomic_bit_test_and_complement_optab;
6222 break;
6223 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6224 code = AND;
6225 optab = atomic_bit_test_and_reset_optab;
6226 break;
6227 default:
6228 gcc_unreachable ();
6231 if (lhs == NULL_TREE)
6233 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6234 val, NULL_RTX, true, OPTAB_DIRECT);
6235 if (code == AND)
6236 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6237 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6238 return;
6241 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6242 enum insn_code icode = direct_optab_handler (optab, mode);
6243 gcc_assert (icode != CODE_FOR_nothing);
6244 create_output_operand (&ops[0], target, mode);
6245 create_fixed_operand (&ops[1], mem);
6246 create_convert_operand_to (&ops[2], val, mode, true);
6247 create_integer_operand (&ops[3], model);
6248 create_integer_operand (&ops[4], integer_onep (flag));
6249 if (maybe_expand_insn (icode, 5, ops))
6250 return;
6252 rtx bitval = val;
6253 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6254 val, NULL_RTX, true, OPTAB_DIRECT);
6255 rtx maskval = val;
6256 if (code == AND)
6257 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6258 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6259 code, model, false);
6260 if (integer_onep (flag))
6262 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6263 NULL_RTX, true, OPTAB_DIRECT);
6264 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6265 true, OPTAB_DIRECT);
6267 else
6268 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6269 OPTAB_DIRECT);
6270 if (result != target)
6271 emit_move_insn (target, result);
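/* Illustrative source-level origin (a pattern recognized earlier, at
   the GIMPLE level; hypothetical user code):

     b = (__atomic_fetch_or (&word, 1u << bit, model) >> bit) & 1;

   becomes IFN_ATOMIC_BIT_TEST_AND_SET, so only the single tested bit
   rather than the whole fetched word needs to be produced.  */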
6274 /* Expand an atomic clear operation.
6275 void _atomic_clear (BOOL *obj, enum memmodel)
6276 EXP is the call expression. */
6278 static rtx
6279 expand_builtin_atomic_clear (tree exp)
6281 machine_mode mode;
6282 rtx mem, ret;
6283 enum memmodel model;
6285 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6286 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6287 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6289 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6291 source_location loc
6292 = expansion_point_location_if_in_system_header (input_location);
6293 warning_at (loc, OPT_Winvalid_memory_model,
6294 "invalid memory model for %<__atomic_store%>");
6295 model = MEMMODEL_SEQ_CST;
6298 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
6299 Failing that, issue a plain store. The only way the atomic store can
6300 fail is if the bool type is larger than a word size. Unlikely, but
6301 handle it anyway for completeness. Assume a single-threaded model since
6302 there is no atomic support in this case, and no barriers are required. */
6303 ret = expand_atomic_store (mem, const0_rtx, model, true);
6304 if (!ret)
6305 emit_move_insn (mem, const0_rtx);
6306 return const0_rtx;
6309 /* Expand an atomic test_and_set operation.
6310 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6311 EXP is the call expression. */
6313 static rtx
6314 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6316 rtx mem;
6317 enum memmodel model;
6318 machine_mode mode;
6320 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6321 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6322 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6324 return expand_atomic_test_and_set (target, mem, model);
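/* Illustrative pairing (hypothetical user code): like the __sync lock
   builtins, these form a minimal spinlock:

     while (__atomic_test_and_set (&guard, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&guard, __ATOMIC_RELEASE);

   which is also why the clear expander above rejects acquire-flavored
   memory models.  */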
6328 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6329 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6331 static tree
6332 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6334 int size;
6335 machine_mode mode;
6336 unsigned int mode_align, type_align;
6338 if (TREE_CODE (arg0) != INTEGER_CST)
6339 return NULL_TREE;
6341 /* We need a corresponding integer mode for the access to be lock-free. */
6342 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6343 if (!int_mode_for_size (size, 0).exists (&mode))
6344 return boolean_false_node;
6346 mode_align = GET_MODE_ALIGNMENT (mode);
6348 if (TREE_CODE (arg1) == INTEGER_CST)
6350 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6352 /* Either this argument is null, or it's a fake pointer encoding
6353 the alignment of the object. */
6354 val = least_bit_hwi (val);
6355 val *= BITS_PER_UNIT;
6357 if (val == 0 || mode_align < val)
6358 type_align = mode_align;
6359 else
6360 type_align = val;
6362 else
6364 tree ttype = TREE_TYPE (arg1);
6366 /* This function is usually invoked and folded immediately by the front
6367 end before anything else has a chance to look at it. The pointer
6368 parameter at this point is usually cast to a void *, so check for that
6369 and look past the cast. */
6370 if (CONVERT_EXPR_P (arg1)
6371 && POINTER_TYPE_P (ttype)
6372 && VOID_TYPE_P (TREE_TYPE (ttype))
6373 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6374 arg1 = TREE_OPERAND (arg1, 0);
6376 ttype = TREE_TYPE (arg1);
6377 gcc_assert (POINTER_TYPE_P (ttype));
6379 /* Get the underlying type of the object. */
6380 ttype = TREE_TYPE (ttype);
6381 type_align = TYPE_ALIGN (ttype);
6384 /* If the object has smaller alignment, the lock free routines cannot
6385 be used. */
6386 if (type_align < mode_align)
6387 return boolean_false_node;
6389 /* Check if a compare_and_swap pattern exists for the mode which represents
6390 the required size. The pattern is not allowed to fail, so the existence
6391 of the pattern indicates support is present. Also require that an
6392 atomic load exists for the required size. */
6393 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6394 return boolean_true_node;
6395 else
6396 return boolean_false_node;
6399 /* Return true if the parameters to call EXP represent an object which will
6400 always generate lock free instructions. The first argument represents the
6401 size of the object, and the second parameter is a pointer to the object
6402 itself. If NULL is passed for the object, then the result is based on
6403 typical alignment for an object of the specified size. Otherwise return
6404 false. */
6406 static rtx
6407 expand_builtin_atomic_always_lock_free (tree exp)
6409 tree size;
6410 tree arg0 = CALL_EXPR_ARG (exp, 0);
6411 tree arg1 = CALL_EXPR_ARG (exp, 1);
6413 if (TREE_CODE (arg0) != INTEGER_CST)
6415 error ("non-constant argument 1 to __atomic_always_lock_free");
6416 return const0_rtx;
6419 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6420 if (size == boolean_true_node)
6421 return const1_rtx;
6422 return const0_rtx;
6425 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6426 is lock free on this architecture. */
6428 static tree
6429 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6431 if (!flag_inline_atomics)
6432 return NULL_TREE;
6434 /* If it isn't always lock free, don't generate a result. */
6435 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6436 return boolean_true_node;
6438 return NULL_TREE;
6441 /* Return true if the parameters to call EXP represent an object which will
6442 always generate lock free instructions. The first argument represents the
6443 size of the object, and the second parameter is a pointer to the object
6444 itself. If NULL is passed for the object, then the result is based on
6445 typical alignment for an object of the specified size. Otherwise return
6446 NULL. */
6448 static rtx
6449 expand_builtin_atomic_is_lock_free (tree exp)
6451 tree size;
6452 tree arg0 = CALL_EXPR_ARG (exp, 0);
6453 tree arg1 = CALL_EXPR_ARG (exp, 1);
6455 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6457 error ("non-integer argument 1 to __atomic_is_lock_free");
6458 return NULL_RTX;
6461 if (!flag_inline_atomics)
6462 return NULL_RTX;
6464 /* If the value is known at compile time, return the RTX for it. */
6465 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6466 if (size == boolean_true_node)
6467 return const1_rtx;
6469 return NULL_RTX;
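/* Illustrative calls (hypothetical user code; T and obj are made-up
   names) for the two queries above:

     __atomic_always_lock_free (sizeof (long), 0);  -- compile-time
                                                       constant answer
     __atomic_is_lock_free (sizeof (T), &obj);      -- may fall back to
                                                       a library call

   A null pointer asks about typical alignment for the size; a real
   pointer lets the object's own alignment be considered.  */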
6472 /* Expand the __atomic_thread_fence intrinsic:
6473 void __atomic_thread_fence (enum memmodel)
6474 EXP is the CALL_EXPR. */
6476 static void
6477 expand_builtin_atomic_thread_fence (tree exp)
6479 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6480 expand_mem_thread_fence (model);
6483 /* Expand the __atomic_signal_fence intrinsic:
6484 void __atomic_signal_fence (enum memmodel)
6485 EXP is the CALL_EXPR. */
6487 static void
6488 expand_builtin_atomic_signal_fence (tree exp)
6490 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6491 expand_mem_signal_fence (model);
6494 /* Expand the __sync_synchronize intrinsic. */
6496 static void
6497 expand_builtin_sync_synchronize (void)
6499 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
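/* Illustrative distinction between the three fences above (hypothetical
   user code):

     __atomic_thread_fence (__ATOMIC_RELEASE);  -- inter-thread ordering
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  -- compiler-only barrier,
                                                   e.g. vs. a signal
                                                   handler on this thread
     __sync_synchronize ();                     -- legacy full barrier

   The signal fence typically emits no machine instruction.  */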
6502 static rtx
6503 expand_builtin_thread_pointer (tree exp, rtx target)
6505 enum insn_code icode;
6506 if (!validate_arglist (exp, VOID_TYPE))
6507 return const0_rtx;
6508 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6509 if (icode != CODE_FOR_nothing)
6511 struct expand_operand op;
6512 /* If the target is not suitable then create a new target. */
6513 if (target == NULL_RTX
6514 || !REG_P (target)
6515 || GET_MODE (target) != Pmode)
6516 target = gen_reg_rtx (Pmode);
6517 create_output_operand (&op, target, Pmode);
6518 expand_insn (icode, 1, &op);
6519 return target;
6521 error ("__builtin_thread_pointer is not supported on this target");
6522 return const0_rtx;
6525 static void
6526 expand_builtin_set_thread_pointer (tree exp)
6528 enum insn_code icode;
6529 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6530 return;
6531 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6532 if (icode != CODE_FOR_nothing)
6534 struct expand_operand op;
6535 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6536 Pmode, EXPAND_NORMAL);
6537 create_input_operand (&op, val, Pmode);
6538 expand_insn (icode, 1, &op);
6539 return;
6541 error ("__builtin_set_thread_pointer is not supported on this target");
6545 /* Emit code to restore the current value of the stack. */
6547 static void
6548 expand_stack_restore (tree var)
6550 rtx_insn *prev;
6551 rtx sa = expand_normal (var);
6553 sa = convert_memory_address (Pmode, sa);
6555 prev = get_last_insn ();
6556 emit_stack_restore (SAVE_BLOCK, sa);
6558 record_new_stack_level ();
6560 fixup_args_size_notes (prev, get_last_insn (), 0);
6563 /* Emit code to save the current value of the stack. */
6565 static rtx
6566 expand_stack_save (void)
6568 rtx ret = NULL_RTX;
6570 emit_stack_save (SAVE_BLOCK, &ret);
6571 return ret;
6574 /* Emit code to get the OpenACC gang, worker, or vector id or size. */
6576 static rtx
6577 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6579 const char *name;
6580 rtx fallback_retval;
6581 rtx_insn *(*gen_fn) (rtx, rtx);
6582 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6584 case BUILT_IN_GOACC_PARLEVEL_ID:
6585 name = "__builtin_goacc_parlevel_id";
6586 fallback_retval = const0_rtx;
6587 gen_fn = targetm.gen_oacc_dim_pos;
6588 break;
6589 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6590 name = "__builtin_goacc_parlevel_size";
6591 fallback_retval = const1_rtx;
6592 gen_fn = targetm.gen_oacc_dim_size;
6593 break;
6594 default:
6595 gcc_unreachable ();
6598 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6600 error ("%qs only supported in OpenACC code", name);
6601 return const0_rtx;
6604 tree arg = CALL_EXPR_ARG (exp, 0);
6605 if (TREE_CODE (arg) != INTEGER_CST)
6607 error ("non-constant argument 0 to %qs", name);
6608 return const0_rtx;
6611 int dim = TREE_INT_CST_LOW (arg);
6612 switch (dim)
6614 case GOMP_DIM_GANG:
6615 case GOMP_DIM_WORKER:
6616 case GOMP_DIM_VECTOR:
6617 break;
6618 default:
6619 error ("illegal argument 0 to %qs", name);
6620 return const0_rtx;
6623 if (ignore)
6624 return target;
6626 if (target == NULL_RTX)
6627 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6629 if (!targetm.have_oacc_dim_size ())
6631 emit_move_insn (target, fallback_retval);
6632 return target;
6635 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6636 emit_insn (gen_fn (reg, GEN_INT (dim)));
6637 if (reg != target)
6638 emit_move_insn (target, reg);
6640 return target;
6643 /* Expand an expression EXP that calls a built-in function,
6644 with result going to TARGET if that's convenient
6645 (and in mode MODE if that's convenient).
6646 SUBTARGET may be used as the target for computing one of EXP's operands.
6647 IGNORE is nonzero if the value is to be ignored. */
6650 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6651 int ignore)
6653 tree fndecl = get_callee_fndecl (exp);
6654 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6655 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6656 int flags;
6658 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6659 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6661 /* When ASan is enabled, we don't want to expand some memory/string
6662 builtins and rely on libsanitizer's hooks. This allows us to avoid
6663 redundant checks and be sure, that possible overflow will be detected
6664 by ASan. */
6666 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6667 return expand_call (exp, target, ignore);
6669 /* When not optimizing, generate calls to library functions for a certain
6670 set of builtins. */
6671 if (!optimize
6672 && !called_as_built_in (fndecl)
6673 && fcode != BUILT_IN_FORK
6674 && fcode != BUILT_IN_EXECL
6675 && fcode != BUILT_IN_EXECV
6676 && fcode != BUILT_IN_EXECLP
6677 && fcode != BUILT_IN_EXECLE
6678 && fcode != BUILT_IN_EXECVP
6679 && fcode != BUILT_IN_EXECVE
6680 && !ALLOCA_FUNCTION_CODE_P (fcode)
6681 && fcode != BUILT_IN_FREE)
6682 return expand_call (exp, target, ignore);
6684 /* The built-in function expanders test for target == const0_rtx
6685 to determine whether the function's result will be ignored. */
6686 if (ignore)
6687 target = const0_rtx;
6689 /* If the result of a pure or const built-in function is ignored, and
6690 none of its arguments are volatile, we can avoid expanding the
6691 built-in call and just evaluate the arguments for side-effects. */
6692 if (target == const0_rtx
6693 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6694 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6696 bool volatilep = false;
6697 tree arg;
6698 call_expr_arg_iterator iter;
6700 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6701 if (TREE_THIS_VOLATILE (arg))
6703 volatilep = true;
6704 break;
6707 if (! volatilep)
6709 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6710 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6711 return const0_rtx;
6715 switch (fcode)
6717 CASE_FLT_FN (BUILT_IN_FABS):
6718 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6719 case BUILT_IN_FABSD32:
6720 case BUILT_IN_FABSD64:
6721 case BUILT_IN_FABSD128:
6722 target = expand_builtin_fabs (exp, target, subtarget);
6723 if (target)
6724 return target;
6725 break;
6727 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6728 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6729 target = expand_builtin_copysign (exp, target, subtarget);
6730 if (target)
6731 return target;
6732 break;
6734 /* Just do a normal library call if we were unable to fold
6735 the values. */
6736 CASE_FLT_FN (BUILT_IN_CABS):
6737 break;
6739 CASE_FLT_FN (BUILT_IN_FMA):
6740 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6741 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6742 if (target)
6743 return target;
6744 break;
6746 CASE_FLT_FN (BUILT_IN_ILOGB):
6747 if (! flag_unsafe_math_optimizations)
6748 break;
6749 gcc_fallthrough ();
6750 CASE_FLT_FN (BUILT_IN_ISINF):
6751 CASE_FLT_FN (BUILT_IN_FINITE):
6752 case BUILT_IN_ISFINITE:
6753 case BUILT_IN_ISNORMAL:
6754 target = expand_builtin_interclass_mathfn (exp, target);
6755 if (target)
6756 return target;
6757 break;
6759 CASE_FLT_FN (BUILT_IN_ICEIL):
6760 CASE_FLT_FN (BUILT_IN_LCEIL):
6761 CASE_FLT_FN (BUILT_IN_LLCEIL):
6762 CASE_FLT_FN (BUILT_IN_LFLOOR):
6763 CASE_FLT_FN (BUILT_IN_IFLOOR):
6764 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6765 target = expand_builtin_int_roundingfn (exp, target);
6766 if (target)
6767 return target;
6768 break;
6770 CASE_FLT_FN (BUILT_IN_IRINT):
6771 CASE_FLT_FN (BUILT_IN_LRINT):
6772 CASE_FLT_FN (BUILT_IN_LLRINT):
6773 CASE_FLT_FN (BUILT_IN_IROUND):
6774 CASE_FLT_FN (BUILT_IN_LROUND):
6775 CASE_FLT_FN (BUILT_IN_LLROUND):
6776 target = expand_builtin_int_roundingfn_2 (exp, target);
6777 if (target)
6778 return target;
6779 break;
6781 CASE_FLT_FN (BUILT_IN_POWI):
6782 target = expand_builtin_powi (exp, target);
6783 if (target)
6784 return target;
6785 break;
6787 CASE_FLT_FN (BUILT_IN_CEXPI):
6788 target = expand_builtin_cexpi (exp, target);
6789 gcc_assert (target);
6790 return target;
6792 CASE_FLT_FN (BUILT_IN_SIN):
6793 CASE_FLT_FN (BUILT_IN_COS):
6794 if (! flag_unsafe_math_optimizations)
6795 break;
6796 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6797 if (target)
6798 return target;
6799 break;
6801 CASE_FLT_FN (BUILT_IN_SINCOS):
6802 if (! flag_unsafe_math_optimizations)
6803 break;
6804 target = expand_builtin_sincos (exp);
6805 if (target)
6806 return target;
6807 break;
6809 case BUILT_IN_APPLY_ARGS:
6810 return expand_builtin_apply_args ();
6812 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6813 FUNCTION with a copy of the parameters described by
6814 ARGUMENTS, and ARGSIZE. It returns a block of memory
6815 allocated on the stack into which is stored all the registers
6816 that might possibly be used for returning the result of a
6817 function. ARGUMENTS is the value returned by
6818 __builtin_apply_args. ARGSIZE is the number of bytes of
6819 arguments that must be copied. ??? How should this value be
6820 computed? We'll also need a safe worst case value for varargs
6821 functions. */
6822 case BUILT_IN_APPLY:
6823 if (!validate_arglist (exp, POINTER_TYPE,
6824 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6825 && !validate_arglist (exp, REFERENCE_TYPE,
6826 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6827 return const0_rtx;
6828 else
6830 rtx ops[3];
6832 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6833 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6834 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6836 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6839 /* __builtin_return (RESULT) causes the function to return the
6840 value described by RESULT. RESULT is address of the block of
6841 memory returned by __builtin_apply. */
6842 case BUILT_IN_RETURN:
6843 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6844 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6845 return const0_rtx;
6847 case BUILT_IN_SAVEREGS:
6848 return expand_builtin_saveregs ();
6850 case BUILT_IN_VA_ARG_PACK:
6851 /* All valid uses of __builtin_va_arg_pack () are removed during
6852 inlining. */
6853 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6854 return const0_rtx;
6856 case BUILT_IN_VA_ARG_PACK_LEN:
6857 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6858 inlining. */
6859 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6860 return const0_rtx;
6862 /* Return the address of the first anonymous stack arg. */
6863 case BUILT_IN_NEXT_ARG:
6864 if (fold_builtin_next_arg (exp, false))
6865 return const0_rtx;
6866 return expand_builtin_next_arg ();
6868 case BUILT_IN_CLEAR_CACHE:
6869 target = expand_builtin___clear_cache (exp);
6870 if (target)
6871 return target;
6872 break;
6874 case BUILT_IN_CLASSIFY_TYPE:
6875 return expand_builtin_classify_type (exp);
6877 case BUILT_IN_CONSTANT_P:
6878 return const0_rtx;
6880 case BUILT_IN_FRAME_ADDRESS:
6881 case BUILT_IN_RETURN_ADDRESS:
6882 return expand_builtin_frame_address (fndecl, exp);
6884 /* Returns the address of the area where the structure is returned.
6885 0 otherwise. */
6886 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6887 if (call_expr_nargs (exp) != 0
6888 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6889 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6890 return const0_rtx;
6891 else
6892 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6894 CASE_BUILT_IN_ALLOCA:
6895 target = expand_builtin_alloca (exp);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6901 return expand_asan_emit_allocas_unpoison (exp);
6903 case BUILT_IN_STACK_SAVE:
6904 return expand_stack_save ();
6906 case BUILT_IN_STACK_RESTORE:
6907 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6908 return const0_rtx;
6910 case BUILT_IN_BSWAP16:
6911 case BUILT_IN_BSWAP32:
6912 case BUILT_IN_BSWAP64:
6913 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6914 if (target)
6915 return target;
6916 break;
6918 CASE_INT_FN (BUILT_IN_FFS):
6919 target = expand_builtin_unop (target_mode, exp, target,
6920 subtarget, ffs_optab);
6921 if (target)
6922 return target;
6923 break;
6925 CASE_INT_FN (BUILT_IN_CLZ):
6926 target = expand_builtin_unop (target_mode, exp, target,
6927 subtarget, clz_optab);
6928 if (target)
6929 return target;
6930 break;
6932 CASE_INT_FN (BUILT_IN_CTZ):
6933 target = expand_builtin_unop (target_mode, exp, target,
6934 subtarget, ctz_optab);
6935 if (target)
6936 return target;
6937 break;
6939 CASE_INT_FN (BUILT_IN_CLRSB):
6940 target = expand_builtin_unop (target_mode, exp, target,
6941 subtarget, clrsb_optab);
6942 if (target)
6943 return target;
6944 break;
6946 CASE_INT_FN (BUILT_IN_POPCOUNT):
6947 target = expand_builtin_unop (target_mode, exp, target,
6948 subtarget, popcount_optab);
6949 if (target)
6950 return target;
6951 break;
6953 CASE_INT_FN (BUILT_IN_PARITY):
6954 target = expand_builtin_unop (target_mode, exp, target,
6955 subtarget, parity_optab);
6956 if (target)
6957 return target;
6958 break;
6960 case BUILT_IN_STRLEN:
6961 target = expand_builtin_strlen (exp, target, target_mode);
6962 if (target)
6963 return target;
6964 break;
6966 case BUILT_IN_STRCAT:
6967 target = expand_builtin_strcat (exp, target);
6968 if (target)
6969 return target;
6970 break;
6972 case BUILT_IN_STRCPY:
6973 target = expand_builtin_strcpy (exp, target);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_STRNCAT:
6979 target = expand_builtin_strncat (exp, target);
6980 if (target)
6981 return target;
6982 break;
6984 case BUILT_IN_STRNCPY:
6985 target = expand_builtin_strncpy (exp, target);
6986 if (target)
6987 return target;
6988 break;
6990 case BUILT_IN_STPCPY:
6991 target = expand_builtin_stpcpy (exp, target, mode);
6992 if (target)
6993 return target;
6994 break;
6996 case BUILT_IN_STPNCPY:
6997 target = expand_builtin_stpncpy (exp, target);
6998 if (target)
6999 return target;
7000 break;
7002 case BUILT_IN_MEMCHR:
7003 target = expand_builtin_memchr (exp, target);
7004 if (target)
7005 return target;
7006 break;
7008 case BUILT_IN_MEMCPY:
7009 target = expand_builtin_memcpy (exp, target);
7010 if (target)
7011 return target;
7012 break;
7014 case BUILT_IN_MEMMOVE:
7015 target = expand_builtin_memmove (exp, target);
7016 if (target)
7017 return target;
7018 break;
7020 case BUILT_IN_MEMPCPY:
7021 target = expand_builtin_mempcpy (exp, target);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_MEMSET:
7027 target = expand_builtin_memset (exp, target, mode);
7028 if (target)
7029 return target;
7030 break;
7032 case BUILT_IN_BZERO:
7033 target = expand_builtin_bzero (exp);
7034 if (target)
7035 return target;
7036 break;
7038 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7039 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7040 when changing it to a strcmp call. */
7041 case BUILT_IN_STRCMP_EQ:
7042 target = expand_builtin_memcmp (exp, target, true);
7043 if (target)
7044 return target;
7046 /* Change this call back to a BUILT_IN_STRCMP. */
7047 TREE_OPERAND (exp, 1)
7048 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7050 /* Delete the last parameter. */
7051 unsigned int i;
7052 vec<tree, va_gc> *arg_vec;
7053 vec_alloc (arg_vec, 2);
7054 for (i = 0; i < 2; i++)
7055 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7056 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7057 /* FALLTHROUGH */
7059 case BUILT_IN_STRCMP:
7060 target = expand_builtin_strcmp (exp, target);
7061 if (target)
7062 return target;
7063 break;
7065 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7066 back to a BUILT_IN_STRNCMP. */
7067 case BUILT_IN_STRNCMP_EQ:
7068 target = expand_builtin_memcmp (exp, target, true);
7069 if (target)
7070 return target;
7072 /* Change it back to a BUILT_IN_STRNCMP. */
7073 TREE_OPERAND (exp, 1)
7074 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7075 /* FALLTHROUGH */
7077 case BUILT_IN_STRNCMP:
7078 target = expand_builtin_strncmp (exp, target, mode);
7079 if (target)
7080 return target;
7081 break;
7083 case BUILT_IN_BCMP:
7084 case BUILT_IN_MEMCMP:
7085 case BUILT_IN_MEMCMP_EQ:
7086 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7087 if (target)
7088 return target;
7089 if (fcode == BUILT_IN_MEMCMP_EQ)
7091 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7092 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7094 break;
7096 case BUILT_IN_SETJMP:
7097 /* This should have been lowered to the builtins below. */
7098 gcc_unreachable ();
7100 case BUILT_IN_SETJMP_SETUP:
7101 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7102 and the receiver label. */
7103 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7105 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7106 VOIDmode, EXPAND_NORMAL);
7107 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7108 rtx_insn *label_r = label_rtx (label);
7110 /* This is copied from the handling of non-local gotos. */
7111 expand_builtin_setjmp_setup (buf_addr, label_r);
7112 nonlocal_goto_handler_labels
7113 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7114 nonlocal_goto_handler_labels);
7115 /* ??? Do not let expand_label treat us as such since we would
7116 not want to be both on the list of non-local labels and on
7117 the list of forced labels. */
7118 FORCED_LABEL (label) = 0;
7119 return const0_rtx;
7121 break;
7123 case BUILT_IN_SETJMP_RECEIVER:
7124 /* __builtin_setjmp_receiver is passed the receiver label. */
7125 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7127 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7128 rtx_insn *label_r = label_rtx (label);
7130 expand_builtin_setjmp_receiver (label_r);
7131 return const0_rtx;
7133 break;
7135 /* __builtin_longjmp is passed a pointer to an array of five words.
7136 It's similar to the C library longjmp function but works with
7137 __builtin_setjmp above. */
7138 case BUILT_IN_LONGJMP:
7139 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7141 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7142 VOIDmode, EXPAND_NORMAL);
7143 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7145 if (value != const1_rtx)
7147 error ("%<__builtin_longjmp%> second argument must be 1");
7148 return const0_rtx;
7151 expand_builtin_longjmp (buf_addr, value);
7152 return const0_rtx;
7154 break;
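 /* For illustration, the two builtins are used together roughly as

      void *buf[5];
      if (__builtin_setjmp (buf) == 0)
        do_work ();
      else
        handle_return ();
      ...
      __builtin_longjmp (buf, 1);

    where do_work and handle_return are hypothetical helpers; the
    second argument of __builtin_longjmp must be the literal 1, as
    enforced above.  */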
7156 case BUILT_IN_NONLOCAL_GOTO:
7157 target = expand_builtin_nonlocal_goto (exp);
7158 if (target)
7159 return target;
7160 break;
7162 /* This updates the setjmp buffer that is its argument with the value
7163 of the current stack pointer. */
7164 case BUILT_IN_UPDATE_SETJMP_BUF:
7165 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7167 rtx buf_addr
7168 = expand_normal (CALL_EXPR_ARG (exp, 0));
7170 expand_builtin_update_setjmp_buf (buf_addr);
7171 return const0_rtx;
7173 break;
7175 case BUILT_IN_TRAP:
7176 expand_builtin_trap ();
7177 return const0_rtx;
7179 case BUILT_IN_UNREACHABLE:
7180 expand_builtin_unreachable ();
7181 return const0_rtx;
7183 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7184 case BUILT_IN_SIGNBITD32:
7185 case BUILT_IN_SIGNBITD64:
7186 case BUILT_IN_SIGNBITD128:
7187 target = expand_builtin_signbit (exp, target);
7188 if (target)
7189 return target;
7190 break;
7192 /* Various hooks for the DWARF 2 __throw routine. */
7193 case BUILT_IN_UNWIND_INIT:
7194 expand_builtin_unwind_init ();
7195 return const0_rtx;
7196 case BUILT_IN_DWARF_CFA:
7197 return virtual_cfa_rtx;
7198 #ifdef DWARF2_UNWIND_INFO
7199 case BUILT_IN_DWARF_SP_COLUMN:
7200 return expand_builtin_dwarf_sp_column ();
7201 case BUILT_IN_INIT_DWARF_REG_SIZES:
7202 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7203 return const0_rtx;
7204 #endif
7205 case BUILT_IN_FROB_RETURN_ADDR:
7206 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7207 case BUILT_IN_EXTRACT_RETURN_ADDR:
7208 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7209 case BUILT_IN_EH_RETURN:
7210 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7211 CALL_EXPR_ARG (exp, 1));
7212 return const0_rtx;
7213 case BUILT_IN_EH_RETURN_DATA_REGNO:
7214 return expand_builtin_eh_return_data_regno (exp);
7215 case BUILT_IN_EXTEND_POINTER:
7216 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7217 case BUILT_IN_EH_POINTER:
7218 return expand_builtin_eh_pointer (exp);
7219 case BUILT_IN_EH_FILTER:
7220 return expand_builtin_eh_filter (exp);
7221 case BUILT_IN_EH_COPY_VALUES:
7222 return expand_builtin_eh_copy_values (exp);
7224 case BUILT_IN_VA_START:
7225 return expand_builtin_va_start (exp);
7226 case BUILT_IN_VA_END:
7227 return expand_builtin_va_end (exp);
7228 case BUILT_IN_VA_COPY:
7229 return expand_builtin_va_copy (exp);
7230 case BUILT_IN_EXPECT:
7231 return expand_builtin_expect (exp, target);
7232 case BUILT_IN_ASSUME_ALIGNED:
7233 return expand_builtin_assume_aligned (exp, target);
7234 case BUILT_IN_PREFETCH:
7235 expand_builtin_prefetch (exp);
7236 return const0_rtx;
7238 case BUILT_IN_INIT_TRAMPOLINE:
7239 return expand_builtin_init_trampoline (exp, true);
7240 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7241 return expand_builtin_init_trampoline (exp, false);
7242 case BUILT_IN_ADJUST_TRAMPOLINE:
7243 return expand_builtin_adjust_trampoline (exp);
7245 case BUILT_IN_INIT_DESCRIPTOR:
7246 return expand_builtin_init_descriptor (exp);
7247 case BUILT_IN_ADJUST_DESCRIPTOR:
7248 return expand_builtin_adjust_descriptor (exp);
7250 case BUILT_IN_FORK:
7251 case BUILT_IN_EXECL:
7252 case BUILT_IN_EXECV:
7253 case BUILT_IN_EXECLP:
7254 case BUILT_IN_EXECLE:
7255 case BUILT_IN_EXECVP:
7256 case BUILT_IN_EXECVE:
7257 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7258 if (target)
7259 return target;
7260 break;
7262 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7263 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7264 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7265 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7266 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7267 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7268 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7269 if (target)
7270 return target;
7271 break;
7273 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7274 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7275 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7276 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7277 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7278 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7279 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7280 if (target)
7281 return target;
7282 break;
7284 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7285 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7286 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7287 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7288 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7289 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7290 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7291 if (target)
7292 return target;
7293 break;
7295 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7296 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7297 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7298 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7299 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7300 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7301 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7302 if (target)
7303 return target;
7304 break;
7306 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7307 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7308 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7309 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7310 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7311 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7312 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7313 if (target)
7314 return target;
7315 break;
7317 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7318 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7319 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7320 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7321 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7322 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7323 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7324 if (target)
7325 return target;
7326 break;
7328 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7329 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7330 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7331 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7332 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7333 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7334 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7335 if (target)
7336 return target;
7337 break;
7339 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7340 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7341 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7342 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7343 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7344 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7345 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7346 if (target)
7347 return target;
7348 break;
7350 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7351 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7352 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7353 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7354 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7355 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7356 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7357 if (target)
7358 return target;
7359 break;
7361 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7362 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7363 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7364 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7365 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7366 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7367 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7368 if (target)
7369 return target;
7370 break;
7372 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7373 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7374 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7375 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7376 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7378 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7379 if (target)
7380 return target;
7381 break;
7383 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7384 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7385 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7386 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7387 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7389 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7390 if (target)
7391 return target;
7392 break;
7394 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7395 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7396 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7397 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7398 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7399 if (mode == VOIDmode)
7400 mode = TYPE_MODE (boolean_type_node);
7401 if (!target || !register_operand (target, mode))
7402 target = gen_reg_rtx (mode);
7404 mode = get_builtin_sync_mode
7405 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7406 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7407 if (target)
7408 return target;
7409 break;
7411 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7412 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7413 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7414 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7415 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7416 mode = get_builtin_sync_mode
7417 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7418 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7419 if (target)
7420 return target;
7421 break;
7423 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7424 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7425 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7426 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7427 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7429 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7430 if (target)
7431 return target;
7432 break;
7434 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7435 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7436 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7437 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7438 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7440 expand_builtin_sync_lock_release (mode, exp);
7441 return const0_rtx;
7443 case BUILT_IN_SYNC_SYNCHRONIZE:
7444 expand_builtin_sync_synchronize ();
7445 return const0_rtx;
7447 case BUILT_IN_ATOMIC_EXCHANGE_1:
7448 case BUILT_IN_ATOMIC_EXCHANGE_2:
7449 case BUILT_IN_ATOMIC_EXCHANGE_4:
7450 case BUILT_IN_ATOMIC_EXCHANGE_8:
7451 case BUILT_IN_ATOMIC_EXCHANGE_16:
7452 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7453 target = expand_builtin_atomic_exchange (mode, exp, target);
7454 if (target)
7455 return target;
7456 break;
7458 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7459 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7460 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7461 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7462 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7464 unsigned int nargs, z;
7465 vec<tree, va_gc> *vec;
7467 mode =
7468 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7469 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7470 if (target)
7471 return target;
7473 /* If this is turned into an external library call, the weak parameter
7474 must be dropped to match the expected parameter list. */
7475 nargs = call_expr_nargs (exp);
7476 vec_alloc (vec, nargs - 1);
7477 for (z = 0; z < 3; z++)
7478 vec->quick_push (CALL_EXPR_ARG (exp, z));
7479 /* Skip the boolean weak parameter. */
7480 for (z = 4; z < 6; z++)
7481 vec->quick_push (CALL_EXPR_ARG (exp, z));
7482 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7483 break;
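 /* Sketch of the rewrite above: a builtin call of the form

      __atomic_compare_exchange_n (p, &expected, desired, weak,
				   success_order, failure_order)

    keeps arguments 0-2 and 4-5, dropping the boolean WEAK argument
    (index 3) so the rebuilt CALL_EXPR matches the five-parameter
    signature of the external __atomic_compare_exchange_N library
    routines.  */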
7486 case BUILT_IN_ATOMIC_LOAD_1:
7487 case BUILT_IN_ATOMIC_LOAD_2:
7488 case BUILT_IN_ATOMIC_LOAD_4:
7489 case BUILT_IN_ATOMIC_LOAD_8:
7490 case BUILT_IN_ATOMIC_LOAD_16:
7491 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7492 target = expand_builtin_atomic_load (mode, exp, target);
7493 if (target)
7494 return target;
7495 break;
7497 case BUILT_IN_ATOMIC_STORE_1:
7498 case BUILT_IN_ATOMIC_STORE_2:
7499 case BUILT_IN_ATOMIC_STORE_4:
7500 case BUILT_IN_ATOMIC_STORE_8:
7501 case BUILT_IN_ATOMIC_STORE_16:
7502 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7503 target = expand_builtin_atomic_store (mode, exp);
7504 if (target)
7505 return const0_rtx;
7506 break;
7508 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7509 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7510 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7511 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7512 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7514 enum built_in_function lib;
7515 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7516 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7517 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7518 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7519 ignore, lib);
7520 if (target)
7521 return target;
7522 break;
7524 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7525 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7526 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7527 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7528 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7530 enum built_in_function lib;
7531 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7532 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7533 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7534 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7535 ignore, lib);
7536 if (target)
7537 return target;
7538 break;
7540 case BUILT_IN_ATOMIC_AND_FETCH_1:
7541 case BUILT_IN_ATOMIC_AND_FETCH_2:
7542 case BUILT_IN_ATOMIC_AND_FETCH_4:
7543 case BUILT_IN_ATOMIC_AND_FETCH_8:
7544 case BUILT_IN_ATOMIC_AND_FETCH_16:
7546 enum built_in_function lib;
7547 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7548 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7549 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7550 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7551 ignore, lib);
7552 if (target)
7553 return target;
7554 break;
7556 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7557 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7558 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7559 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7560 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7562 enum built_in_function lib;
7563 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7564 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7565 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7566 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7567 ignore, lib);
7568 if (target)
7569 return target;
7570 break;
7572 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7573 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7574 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7575 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7576 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7578 enum built_in_function lib;
7579 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7580 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7581 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7582 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7583 ignore, lib);
7584 if (target)
7585 return target;
7586 break;
7588 case BUILT_IN_ATOMIC_OR_FETCH_1:
7589 case BUILT_IN_ATOMIC_OR_FETCH_2:
7590 case BUILT_IN_ATOMIC_OR_FETCH_4:
7591 case BUILT_IN_ATOMIC_OR_FETCH_8:
7592 case BUILT_IN_ATOMIC_OR_FETCH_16:
7594 enum built_in_function lib;
7595 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7596 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7597 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7598 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7599 ignore, lib);
7600 if (target)
7601 return target;
7602 break;
7604 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7605 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7606 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7607 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7608 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7609 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7610 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7611 ignore, BUILT_IN_NONE);
7612 if (target)
7613 return target;
7614 break;
7616 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7617 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7618 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7619 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7620 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7621 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7622 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7623 ignore, BUILT_IN_NONE);
7624 if (target)
7625 return target;
7626 break;
7628 case BUILT_IN_ATOMIC_FETCH_AND_1:
7629 case BUILT_IN_ATOMIC_FETCH_AND_2:
7630 case BUILT_IN_ATOMIC_FETCH_AND_4:
7631 case BUILT_IN_ATOMIC_FETCH_AND_8:
7632 case BUILT_IN_ATOMIC_FETCH_AND_16:
7633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7634 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7635 ignore, BUILT_IN_NONE);
7636 if (target)
7637 return target;
7638 break;
7640 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7641 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7642 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7643 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7644 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7645 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7646 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7647 ignore, BUILT_IN_NONE);
7648 if (target)
7649 return target;
7650 break;
7652 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7653 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7654 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7655 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7656 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7657 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7658 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7659 ignore, BUILT_IN_NONE);
7660 if (target)
7661 return target;
7662 break;
7664 case BUILT_IN_ATOMIC_FETCH_OR_1:
7665 case BUILT_IN_ATOMIC_FETCH_OR_2:
7666 case BUILT_IN_ATOMIC_FETCH_OR_4:
7667 case BUILT_IN_ATOMIC_FETCH_OR_8:
7668 case BUILT_IN_ATOMIC_FETCH_OR_16:
7669 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7670 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7671 ignore, BUILT_IN_NONE);
7672 if (target)
7673 return target;
7674 break;
7676 case BUILT_IN_ATOMIC_TEST_AND_SET:
7677 return expand_builtin_atomic_test_and_set (exp, target);
7679 case BUILT_IN_ATOMIC_CLEAR:
7680 return expand_builtin_atomic_clear (exp);
7682 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7683 return expand_builtin_atomic_always_lock_free (exp);
7685 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7686 target = expand_builtin_atomic_is_lock_free (exp);
7687 if (target)
7688 return target;
7689 break;
7691 case BUILT_IN_ATOMIC_THREAD_FENCE:
7692 expand_builtin_atomic_thread_fence (exp);
7693 return const0_rtx;
7695 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7696 expand_builtin_atomic_signal_fence (exp);
7697 return const0_rtx;
7699 case BUILT_IN_OBJECT_SIZE:
7700 return expand_builtin_object_size (exp);
7702 case BUILT_IN_MEMCPY_CHK:
7703 case BUILT_IN_MEMPCPY_CHK:
7704 case BUILT_IN_MEMMOVE_CHK:
7705 case BUILT_IN_MEMSET_CHK:
7706 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7707 if (target)
7708 return target;
7709 break;
7711 case BUILT_IN_STRCPY_CHK:
7712 case BUILT_IN_STPCPY_CHK:
7713 case BUILT_IN_STRNCPY_CHK:
7714 case BUILT_IN_STPNCPY_CHK:
7715 case BUILT_IN_STRCAT_CHK:
7716 case BUILT_IN_STRNCAT_CHK:
7717 case BUILT_IN_SNPRINTF_CHK:
7718 case BUILT_IN_VSNPRINTF_CHK:
7719 maybe_emit_chk_warning (exp, fcode);
7720 break;
7722 case BUILT_IN_SPRINTF_CHK:
7723 case BUILT_IN_VSPRINTF_CHK:
7724 maybe_emit_sprintf_chk_warning (exp, fcode);
7725 break;
7727 case BUILT_IN_FREE:
7728 if (warn_free_nonheap_object)
7729 maybe_emit_free_warning (exp);
7730 break;
7732 case BUILT_IN_THREAD_POINTER:
7733 return expand_builtin_thread_pointer (exp, target);
7735 case BUILT_IN_SET_THREAD_POINTER:
7736 expand_builtin_set_thread_pointer (exp);
7737 return const0_rtx;
7739 case BUILT_IN_ACC_ON_DEVICE:
7740 /* Fall back to a library call if we failed to expand the builtin
7741 when folding. */
7742 break;
7744 case BUILT_IN_GOACC_PARLEVEL_ID:
7745 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7746 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7748 default: /* Just do a library call for an unknown builtin. */
7749 break;
7752 /* The switch statement above can drop through to cause the function
7753 to be called normally. */
7754 return expand_call (exp, target, ignore);
7757 /* Determine whether a tree node represents a call to a built-in
7758 function. If the tree T is a call to a built-in function with
7759 the right number of arguments of the appropriate types, return
7760 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7761 Otherwise the return value is END_BUILTINS. */
7763 enum built_in_function
7764 builtin_mathfn_code (const_tree t)
7766 const_tree fndecl, arg, parmlist;
7767 const_tree argtype, parmtype;
7768 const_call_expr_arg_iterator iter;
7770 if (TREE_CODE (t) != CALL_EXPR)
7771 return END_BUILTINS;
7773 fndecl = get_callee_fndecl (t);
7774 if (fndecl == NULL_TREE
7775 || TREE_CODE (fndecl) != FUNCTION_DECL
7776 || ! DECL_BUILT_IN (fndecl)
7777 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7778 return END_BUILTINS;
7780 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7781 init_const_call_expr_arg_iterator (t, &iter);
7782 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7784 /* If a function doesn't take a variable number of arguments,
7785 the last element in the list will have type `void'. */
7786 parmtype = TREE_VALUE (parmlist);
7787 if (VOID_TYPE_P (parmtype))
7789 if (more_const_call_expr_args_p (&iter))
7790 return END_BUILTINS;
7791 return DECL_FUNCTION_CODE (fndecl);
7794 if (! more_const_call_expr_args_p (&iter))
7795 return END_BUILTINS;
7797 arg = next_const_call_expr_arg (&iter);
7798 argtype = TREE_TYPE (arg);
7800 if (SCALAR_FLOAT_TYPE_P (parmtype))
7802 if (! SCALAR_FLOAT_TYPE_P (argtype))
7803 return END_BUILTINS;
7805 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7807 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7808 return END_BUILTINS;
7810 else if (POINTER_TYPE_P (parmtype))
7812 if (! POINTER_TYPE_P (argtype))
7813 return END_BUILTINS;
7815 else if (INTEGRAL_TYPE_P (parmtype))
7817 if (! INTEGRAL_TYPE_P (argtype))
7818 return END_BUILTINS;
7820 else
7821 return END_BUILTINS;
7824 /* Variable-length argument list. */
7825 return DECL_FUNCTION_CODE (fndecl);
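/* For instance, the call sqrt (x) with a double X yields
   BUILT_IN_SQRT here, while a call whose argument types fail to
   match the builtin's parameter list yields END_BUILTINS.  */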
7828 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7829 evaluate to a constant. */
7831 static tree
7832 fold_builtin_constant_p (tree arg)
7834 /* We return 1 for a numeric type that's known to be a constant
7835 value at compile-time or for an aggregate type that's a
7836 literal constant. */
7837 STRIP_NOPS (arg);
7839 /* If we know this is a constant, return the integer constant 1. */
7840 if (CONSTANT_CLASS_P (arg)
7841 || (TREE_CODE (arg) == CONSTRUCTOR
7842 && TREE_CONSTANT (arg)))
7843 return integer_one_node;
7844 if (TREE_CODE (arg) == ADDR_EXPR)
7846 tree op = TREE_OPERAND (arg, 0);
7847 if (TREE_CODE (op) == STRING_CST
7848 || (TREE_CODE (op) == ARRAY_REF
7849 && integer_zerop (TREE_OPERAND (op, 1))
7850 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7851 return integer_one_node;
7854 /* If this expression has side effects, show we don't know it to be a
7855 constant. Likewise if it's a pointer or aggregate type since in
7856 those cases we only want literals, since those are only optimized
7857 when generating RTL, not later.
7858 And finally, if we are compiling an initializer, not code, we
7859 need to return a definite result now; there's not going to be any
7860 more optimization done. */
7861 if (TREE_SIDE_EFFECTS (arg)
7862 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7863 || POINTER_TYPE_P (TREE_TYPE (arg))
7864 || cfun == 0
7865 || folding_initializer
7866 || force_folding_builtin_constant_p)
7867 return integer_zero_node;
7869 return NULL_TREE;
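/* Illustrative outcomes: __builtin_constant_p (42) and
   __builtin_constant_p ("abc") fold to 1 immediately;
   __builtin_constant_p (x) for a plain automatic X folds to 0 only
   when a definite answer is required (initializers, or when folding
   is forced), and otherwise returns NULL_TREE so later passes can
   still decide.  */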
7872 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7873 return it as a truthvalue. */
7875 static tree
7876 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7877 tree predictor)
7879 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7881 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7882 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7883 ret_type = TREE_TYPE (TREE_TYPE (fn));
7884 pred_type = TREE_VALUE (arg_types);
7885 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7887 pred = fold_convert_loc (loc, pred_type, pred);
7888 expected = fold_convert_loc (loc, expected_type, expected);
7889 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7890 predictor);
7892 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7893 build_int_cst (ret_type, 0));
7896 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7897 NULL_TREE if no simplification is possible. */
7899 tree
7900 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7902 tree inner, fndecl, inner_arg0;
7903 enum tree_code code;
7905 /* Distribute the expected value over short-circuiting operators.
7906 See through the cast from truthvalue_type_node to long. */
7907 inner_arg0 = arg0;
7908 while (CONVERT_EXPR_P (inner_arg0)
7909 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7910 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7911 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7913 /* If this is a builtin_expect within a builtin_expect, keep the
7914 inner one. See through a comparison against a constant. It
7915 might have been added to create a truthvalue. */
7916 inner = inner_arg0;
7918 if (COMPARISON_CLASS_P (inner)
7919 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7920 inner = TREE_OPERAND (inner, 0);
7922 if (TREE_CODE (inner) == CALL_EXPR
7923 && (fndecl = get_callee_fndecl (inner))
7924 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7925 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7926 return arg0;
7928 inner = inner_arg0;
7929 code = TREE_CODE (inner);
7930 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7932 tree op0 = TREE_OPERAND (inner, 0);
7933 tree op1 = TREE_OPERAND (inner, 1);
7934 arg1 = save_expr (arg1);
7936 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7937 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7938 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7940 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7943 /* If the argument isn't invariant then there's nothing else we can do. */
7944 if (!TREE_CONSTANT (inner_arg0))
7945 return NULL_TREE;
7947 /* If we expect that a comparison against the argument will fold to
7948 a constant, return the constant. In practice, this means a true
7949 constant or the address of a non-weak symbol. */
7950 inner = inner_arg0;
7951 STRIP_NOPS (inner);
7952 if (TREE_CODE (inner) == ADDR_EXPR)
7956 inner = TREE_OPERAND (inner, 0);
7958 while (TREE_CODE (inner) == COMPONENT_REF
7959 || TREE_CODE (inner) == ARRAY_REF);
7960 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7961 return NULL_TREE;
7964 /* Otherwise, ARG0 already has the proper type for the return value. */
7965 return arg0;
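/* A sketch of the distribution step above:

     __builtin_expect (a && b, 1)

   becomes roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit carries the prediction.  */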
7968 /* Fold a call to __builtin_classify_type with argument ARG. */
7970 static tree
7971 fold_builtin_classify_type (tree arg)
7973 if (arg == 0)
7974 return build_int_cst (integer_type_node, no_type_class);
7976 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7979 /* Fold a call to __builtin_strlen with argument ARG. */
7981 static tree
7982 fold_builtin_strlen (location_t loc, tree type, tree arg)
7984 if (!validate_arg (arg, POINTER_TYPE))
7985 return NULL_TREE;
7986 else
7988 tree len = c_strlen (arg, 0);
7990 if (len)
7991 return fold_convert_loc (loc, type, len);
7993 return NULL_TREE;
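/* E.g. strlen ("abc") folds here to the constant 3, converted to
   the call's return type, once c_strlen succeeds on the literal.  */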
7997 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7999 static tree
8000 fold_builtin_inf (location_t loc, tree type, int warn)
8002 REAL_VALUE_TYPE real;
8004 /* __builtin_inff is intended to be usable to define INFINITY on all
8005 targets. If an infinity is not available, INFINITY expands "to a
8006 positive constant of type float that overflows at translation
8007 time", footnote "In this case, using INFINITY will violate the
8008 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8009 Thus we pedwarn to ensure this constraint violation is
8010 diagnosed. */
8011 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8012 pedwarn (loc, 0, "target format does not support infinity");
8014 real_inf (&real);
8015 return build_real (type, real);
8018 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8019 NULL_TREE if no simplification can be made. */
8021 static tree
8022 fold_builtin_sincos (location_t loc,
8023 tree arg0, tree arg1, tree arg2)
8025 tree type;
8026 tree fndecl, call = NULL_TREE;
8028 if (!validate_arg (arg0, REAL_TYPE)
8029 || !validate_arg (arg1, POINTER_TYPE)
8030 || !validate_arg (arg2, POINTER_TYPE))
8031 return NULL_TREE;
8033 type = TREE_TYPE (arg0);
8035 /* Calculate the result when the argument is a constant. */
8036 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8037 if (fn == END_BUILTINS)
8038 return NULL_TREE;
8040 /* Canonicalize sincos to cexpi. */
8041 if (TREE_CODE (arg0) == REAL_CST)
8043 tree complex_type = build_complex_type (type);
8044 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8046 if (!call)
8048 if (!targetm.libc_has_function (function_c99_math_complex)
8049 || !builtin_decl_implicit_p (fn))
8050 return NULL_TREE;
8051 fndecl = builtin_decl_explicit (fn);
8052 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8053 call = builtin_save_expr (call);
8056 tree ptype = build_pointer_type (type);
8057 arg1 = fold_convert (ptype, arg1);
8058 arg2 = fold_convert (ptype, arg2);
8059 return build2 (COMPOUND_EXPR, void_type_node,
8060 build2 (MODIFY_EXPR, void_type_node,
8061 build_fold_indirect_ref_loc (loc, arg1),
8062 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8063 build2 (MODIFY_EXPR, void_type_node,
8064 build_fold_indirect_ref_loc (loc, arg2),
8065 fold_build1_loc (loc, REALPART_EXPR, type, call)));
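/* Sketch of the result built above: sincos (x, psin, pcos) becomes

     *psin = IMAGPART_EXPR <cexpi (x)>, *pcos = REALPART_EXPR <cexpi (x)>

   with the cexpi call (or its constant-folded value) wrapped so it
   is evaluated only once.  */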
8068 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8069 Return NULL_TREE if no simplification can be made. */
8071 static tree
8072 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8074 if (!validate_arg (arg1, POINTER_TYPE)
8075 || !validate_arg (arg2, POINTER_TYPE)
8076 || !validate_arg (len, INTEGER_TYPE))
8077 return NULL_TREE;
8079 /* If the LEN parameter is zero, return zero. */
8080 if (integer_zerop (len))
8081 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8082 arg1, arg2);
8084 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8085 if (operand_equal_p (arg1, arg2, 0))
8086 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8088 /* If the LEN parameter is one, return an expression corresponding to
8089 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8090 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8092 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8093 tree cst_uchar_ptr_node
8094 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8096 tree ind1
8097 = fold_convert_loc (loc, integer_type_node,
8098 build1 (INDIRECT_REF, cst_uchar_node,
8099 fold_convert_loc (loc,
8100 cst_uchar_ptr_node,
8101 arg1)));
8102 tree ind2
8103 = fold_convert_loc (loc, integer_type_node,
8104 build1 (INDIRECT_REF, cst_uchar_node,
8105 fold_convert_loc (loc,
8106 cst_uchar_ptr_node,
8107 arg2)));
8108 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8111 return NULL_TREE;
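/* Worked instances of the folds above: memcmp (p, q, 0) becomes 0,
   memcmp (p, p, n) becomes 0, and memcmp (p, q, 1) becomes
   *(const unsigned char *) p - *(const unsigned char *) q.  */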
8114 /* Fold a call to builtin isascii with argument ARG. */
8116 static tree
8117 fold_builtin_isascii (location_t loc, tree arg)
8119 if (!validate_arg (arg, INTEGER_TYPE))
8120 return NULL_TREE;
8121 else
8123 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8124 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8125 build_int_cst (integer_type_node,
8126 ~ (unsigned HOST_WIDE_INT) 0x7f));
8127 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8128 arg, integer_zero_node);
8132 /* Fold a call to builtin toascii with argument ARG. */
8134 static tree
8135 fold_builtin_toascii (location_t loc, tree arg)
8137 if (!validate_arg (arg, INTEGER_TYPE))
8138 return NULL_TREE;
8140 /* Transform toascii(c) -> (c & 0x7f). */
8141 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8142 build_int_cst (integer_type_node, 0x7f));
8145 /* Fold a call to builtin isdigit with argument ARG. */
8147 static tree
8148 fold_builtin_isdigit (location_t loc, tree arg)
8150 if (!validate_arg (arg, INTEGER_TYPE))
8151 return NULL_TREE;
8152 else
8154 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8155 /* According to the C standard, isdigit is unaffected by locale.
8156 However, it definitely is affected by the target character set. */
8157 unsigned HOST_WIDE_INT target_digit0
8158 = lang_hooks.to_target_charset ('0');
8160 if (target_digit0 == 0)
8161 return NULL_TREE;
8163 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8164 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8165 build_int_cst (unsigned_type_node, target_digit0));
8166 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8167 build_int_cst (unsigned_type_node, 9));
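/* Worked example on an ASCII target, where '0' is 48:
   isdigit (c) becomes (unsigned) c - 48 <= 9, so isdigit ('7')
   is 55u - 48u == 7u, which is <= 9u, i.e. 1.  */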
8171 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8173 static tree
8174 fold_builtin_fabs (location_t loc, tree arg, tree type)
8176 if (!validate_arg (arg, REAL_TYPE))
8177 return NULL_TREE;
8179 arg = fold_convert_loc (loc, type, arg);
8180 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8183 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8185 static tree
8186 fold_builtin_abs (location_t loc, tree arg, tree type)
8188 if (!validate_arg (arg, INTEGER_TYPE))
8189 return NULL_TREE;
8191 arg = fold_convert_loc (loc, type, arg);
8192 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8195 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8197 static tree
8198 fold_builtin_carg (location_t loc, tree arg, tree type)
8200 if (validate_arg (arg, COMPLEX_TYPE)
8201 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8203 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8205 if (atan2_fn)
8207 tree new_arg = builtin_save_expr (arg);
8208 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8209 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8210 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8214 return NULL_TREE;
8217 /* Fold a call to builtin frexp, we can assume the base is 2. */
8219 static tree
8220 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8222 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8223 return NULL_TREE;
8225 STRIP_NOPS (arg0);
8227 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8228 return NULL_TREE;
8230 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8232 /* Proceed if a valid pointer type was passed in. */
8233 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8235 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8236 tree frac, exp;
8238 switch (value->cl)
8240 case rvc_zero:
8241 /* For +-0, return (*exp = 0, +-0). */
8242 exp = integer_zero_node;
8243 frac = arg0;
8244 break;
8245 case rvc_nan:
8246 case rvc_inf:
8247 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8248 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8249 case rvc_normal:
8251 /* Since the frexp function always expects base 2, and in
8252 GCC normalized significands are already in the range
8253 [0.5, 1.0), we have exactly what frexp wants. */
8254 REAL_VALUE_TYPE frac_rvt = *value;
8255 SET_REAL_EXP (&frac_rvt, 0);
8256 frac = build_real (rettype, frac_rvt);
8257 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8259 break;
8260 default:
8261 gcc_unreachable ();
8264 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8265 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8266 TREE_SIDE_EFFECTS (arg1) = 1;
8267 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8270 return NULL_TREE;
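/* Worked example: for ARG0 == 6.0 the normalized significand is
   0.75 with exponent 3 (6.0 == 0.75 * 2**3), so the call folds to
   (*arg1 = 3, 0.75).  */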
8273 /* Fold a call to builtin modf. */
8275 static tree
8276 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8278 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8279 return NULL_TREE;
8281 STRIP_NOPS (arg0);
8283 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8284 return NULL_TREE;
8286 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8288 /* Proceed if a valid pointer type was passed in. */
8289 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8291 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8292 REAL_VALUE_TYPE trunc, frac;
8294 switch (value->cl)
8296 case rvc_nan:
8297 case rvc_zero:
8298 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8299 trunc = frac = *value;
8300 break;
8301 case rvc_inf:
8302 /* For +-Inf, return (*arg1 = arg0, +-0). */
8303 frac = dconst0;
8304 frac.sign = value->sign;
8305 trunc = *value;
8306 break;
8307 case rvc_normal:
8308 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8309 real_trunc (&trunc, VOIDmode, value);
8310 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8311 /* If the original number was negative and already
8312 integral, then the fractional part is -0.0. */
8313 if (value->sign && frac.cl == rvc_zero)
8314 frac.sign = value->sign;
8315 break;
8318 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8319 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8320 build_real (rettype, trunc));
8321 TREE_SIDE_EFFECTS (arg1) = 1;
8322 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8323 build_real (rettype, frac));
8326 return NULL_TREE;
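/* Worked examples: modf (-2.5, p) folds to (*p = -2.0, -0.5), and
   modf (-2.0, p) to (*p = -2.0, -0.0), the sign being copied onto
   the zero fractional part as computed above.  */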
8329 /* Given a location LOC, an interclass builtin function decl FNDECL
8330 and its single argument ARG, return a folded expression computing
8331 the same, or NULL_TREE if we either couldn't or didn't want to fold
8332 (the latter happens if there's an RTL instruction available). */
8334 static tree
8335 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8337 machine_mode mode;
8339 if (!validate_arg (arg, REAL_TYPE))
8340 return NULL_TREE;
8342 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8343 return NULL_TREE;
8345 mode = TYPE_MODE (TREE_TYPE (arg));
8347 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8349 /* If there is no optab, try generic code. */
8350 switch (DECL_FUNCTION_CODE (fndecl))
8352 tree result;
8354 CASE_FLT_FN (BUILT_IN_ISINF):
8356 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8357 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8358 tree type = TREE_TYPE (arg);
8359 REAL_VALUE_TYPE r;
8360 char buf[128];
8362 if (is_ibm_extended)
8364 /* NaN and Inf are encoded in the high-order double value
8365 only. The low-order value is not significant. */
8366 type = double_type_node;
8367 mode = DFmode;
8368 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8370 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8371 real_from_string (&r, buf);
8372 result = build_call_expr (isgr_fn, 2,
8373 fold_build1_loc (loc, ABS_EXPR, type, arg),
8374 build_real (type, r));
8375 return result;
8377 CASE_FLT_FN (BUILT_IN_FINITE):
8378 case BUILT_IN_ISFINITE:
8380 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8381 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8382 tree type = TREE_TYPE (arg);
8383 REAL_VALUE_TYPE r;
8384 char buf[128];
8386 if (is_ibm_extended)
8388 /* NaN and Inf are encoded in the high-order double value
8389 only. The low-order value is not significant. */
8390 type = double_type_node;
8391 mode = DFmode;
8392 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8394 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8395 real_from_string (&r, buf);
8396 result = build_call_expr (isle_fn, 2,
8397 fold_build1_loc (loc, ABS_EXPR, type, arg),
8398 build_real (type, r));
8399 /*result = fold_build2_loc (loc, UNGT_EXPR,
8400 TREE_TYPE (TREE_TYPE (fndecl)),
8401 fold_build1_loc (loc, ABS_EXPR, type, arg),
8402 build_real (type, r));
8403 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8404 TREE_TYPE (TREE_TYPE (fndecl)),
8405 result);*/
8406 return result;
8408 case BUILT_IN_ISNORMAL:
8410 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8411 islessequal(fabs(x),DBL_MAX). */
8412 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8413 tree type = TREE_TYPE (arg);
8414 tree orig_arg, max_exp, min_exp;
8415 machine_mode orig_mode = mode;
8416 REAL_VALUE_TYPE rmax, rmin;
8417 char buf[128];
8419 orig_arg = arg = builtin_save_expr (arg);
8420 if (is_ibm_extended)
8422 /* Use double to test the normal range of IBM extended
8423 precision. Emin for IBM extended precision is
8424 different to emin for IEEE double, being 53 higher
8425 since the low double exponent is at least 53 lower
8426 than the high double exponent. */
8427 type = double_type_node;
8428 mode = DFmode;
8429 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8431 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8433 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8434 real_from_string (&rmax, buf);
8435 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8436 real_from_string (&rmin, buf);
8437 max_exp = build_real (type, rmax);
8438 min_exp = build_real (type, rmin);
8440 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8441 if (is_ibm_extended)
8443 /* Testing the high end of the range is done just using
8444 the high double, using the same test as isfinite().
8445 For the subnormal end of the range we first test the
8446 high double, then if its magnitude is equal to the
8447 limit of 0x1p-969, we test whether the low double is
8448 non-zero and opposite sign to the high double. */
8449 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8450 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8451 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8452 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8453 arg, min_exp);
8454 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8455 complex_double_type_node, orig_arg);
8456 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8457 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8458 tree zero = build_real (type, dconst0);
8459 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8460 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8461 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8462 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8463 fold_build3 (COND_EXPR,
8464 integer_type_node,
8465 hilt, logt, lolt));
8466 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8467 eq_min, ok_lo);
8468 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8469 gt_min, eq_min);
8471 else
8473 tree const isge_fn
8474 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8475 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8477 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8478 max_exp, min_exp);
8479 return result;
8481 default:
8482 break;
8485 return NULL_TREE;
8488 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8489 ARG is the argument for the call. */
8491 static tree
8492 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8494 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8496 if (!validate_arg (arg, REAL_TYPE))
8497 return NULL_TREE;
8499 switch (builtin_index)
8501 case BUILT_IN_ISINF:
8502 if (!HONOR_INFINITIES (arg))
8503 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8505 return NULL_TREE;
8507 case BUILT_IN_ISINF_SIGN:
8509 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8510 /* In a boolean context, GCC will fold the inner COND_EXPR to
8511 1. So e.g. "if (isinf_sign(x))" would be folded to just
8512 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8513 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8514 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8515 tree tmp = NULL_TREE;
8517 arg = builtin_save_expr (arg);
8519 if (signbit_fn && isinf_fn)
8521 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8522 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8524 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8525 signbit_call, integer_zero_node);
8526 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8527 isinf_call, integer_zero_node);
8529 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8530 integer_minus_one_node, integer_one_node);
8531 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8532 isinf_call, tmp,
8533 integer_zero_node);
8536 return tmp;
8539 case BUILT_IN_ISFINITE:
8540 if (!HONOR_NANS (arg)
8541 && !HONOR_INFINITIES (arg))
8542 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8544 return NULL_TREE;
8546 case BUILT_IN_ISNAN:
8547 if (!HONOR_NANS (arg))
8548 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8551 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8552 if (is_ibm_extended)
8554 /* NaN and Inf are encoded in the high-order double value
8555 only. The low-order value is not significant. */
8556 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8559 arg = builtin_save_expr (arg);
8560 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8562 default:
8563 gcc_unreachable ();
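/* E.g. when NaNs are honored, isnan (x) folds to the self-compare
   UNORDERED_EXPR <x, x> built above; like x != x, it is true
   exactly for NaN operands.  */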
8567 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8568 This builtin will generate code to return the appropriate floating
8569 point classification depending on the value of the floating point
8570 number passed in. The possible return values must be supplied as
8571 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8572 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8573 one floating point argument, which is "type generic". */
8575 static tree
8576 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8578 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8579 arg, type, res, tmp;
8580 machine_mode mode;
8581 REAL_VALUE_TYPE r;
8582 char buf[128];
8584 /* Verify the required arguments in the original call. */
8585 if (nargs != 6
8586 || !validate_arg (args[0], INTEGER_TYPE)
8587 || !validate_arg (args[1], INTEGER_TYPE)
8588 || !validate_arg (args[2], INTEGER_TYPE)
8589 || !validate_arg (args[3], INTEGER_TYPE)
8590 || !validate_arg (args[4], INTEGER_TYPE)
8591 || !validate_arg (args[5], REAL_TYPE))
8592 return NULL_TREE;
8594 fp_nan = args[0];
8595 fp_infinite = args[1];
8596 fp_normal = args[2];
8597 fp_subnormal = args[3];
8598 fp_zero = args[4];
8599 arg = args[5];
8600 type = TREE_TYPE (arg);
8601 mode = TYPE_MODE (type);
8602 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8604 /* fpclassify(x) ->
8605 isnan(x) ? FP_NAN :
8606 (fabs(x) == Inf ? FP_INFINITE :
8607 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8608 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8610 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8611 build_real (type, dconst0));
8612 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8613 tmp, fp_zero, fp_subnormal);
8615 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8616 real_from_string (&r, buf);
8617 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8618 arg, build_real (type, r));
8619 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8621 if (HONOR_INFINITIES (mode))
8623 real_inf (&r);
8624 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8625 build_real (type, r));
8626 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8627 fp_infinite, res);
8630 if (HONOR_NANS (mode))
8632 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8633 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8636 return res;
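/* For a double argument the subnormal bound built above is
   "0x1p-1022" (DBL_MIN), so a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, x)

   tests, innermost first: x == 0, fabs (x) >= 0x1p-1022,
   fabs (x) == Inf, and finally orderedness to separate FP_NAN.  */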
8639 /* Fold a call to an unordered comparison function such as
8640 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8641 being called and ARG0 and ARG1 are the arguments for the call.
8642 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8643 the opposite of the desired result. UNORDERED_CODE is used
8644 for modes that can hold NaNs and ORDERED_CODE is used for
8645 the rest. */
8647 static tree
8648 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8649 enum tree_code unordered_code,
8650 enum tree_code ordered_code)
8652 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8653 enum tree_code code;
8654 tree type0, type1;
8655 enum tree_code code0, code1;
8656 tree cmp_type = NULL_TREE;
8658 type0 = TREE_TYPE (arg0);
8659 type1 = TREE_TYPE (arg1);
8661 code0 = TREE_CODE (type0);
8662 code1 = TREE_CODE (type1);
8664 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8665 /* Choose the wider of two real types. */
8666 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8667 ? type0 : type1;
8668 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8669 cmp_type = type0;
8670 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8671 cmp_type = type1;
8673 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8674 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8676 if (unordered_code == UNORDERED_EXPR)
8678 if (!HONOR_NANS (arg0))
8679 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8680 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8683 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8684 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8685 fold_build2_loc (loc, code, type, arg0, arg1));
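/* For example __builtin_isgreater (x, y), whose UNORDERED_CODE is
   UNLE_EXPR and ORDERED_CODE is LE_EXPR, folds to !(x UNLE y) when
   NaNs are honored and to !(x <= y) otherwise.  */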
8688 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8689 arithmetic if it can never overflow, or into internal functions that
8690 return both the result of the arithmetic and an overflow flag in
8691 a complex integer result, or some other check for overflow.
8692 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8693 checking part of that. */
8695 static tree
8696 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8697 tree arg0, tree arg1, tree arg2)
8699 enum internal_fn ifn = IFN_LAST;
8700 /* The code of the expression corresponding to the type-generic
8701 built-in, or ERROR_MARK for the type-specific ones. */
8702 enum tree_code opcode = ERROR_MARK;
8703 bool ovf_only = false;
8705 switch (fcode)
8707 case BUILT_IN_ADD_OVERFLOW_P:
8708 ovf_only = true;
8709 /* FALLTHRU */
8710 case BUILT_IN_ADD_OVERFLOW:
8711 opcode = PLUS_EXPR;
8712 /* FALLTHRU */
8713 case BUILT_IN_SADD_OVERFLOW:
8714 case BUILT_IN_SADDL_OVERFLOW:
8715 case BUILT_IN_SADDLL_OVERFLOW:
8716 case BUILT_IN_UADD_OVERFLOW:
8717 case BUILT_IN_UADDL_OVERFLOW:
8718 case BUILT_IN_UADDLL_OVERFLOW:
8719 ifn = IFN_ADD_OVERFLOW;
8720 break;
8721 case BUILT_IN_SUB_OVERFLOW_P:
8722 ovf_only = true;
8723 /* FALLTHRU */
8724 case BUILT_IN_SUB_OVERFLOW:
8725 opcode = MINUS_EXPR;
8726 /* FALLTHRU */
8727 case BUILT_IN_SSUB_OVERFLOW:
8728 case BUILT_IN_SSUBL_OVERFLOW:
8729 case BUILT_IN_SSUBLL_OVERFLOW:
8730 case BUILT_IN_USUB_OVERFLOW:
8731 case BUILT_IN_USUBL_OVERFLOW:
8732 case BUILT_IN_USUBLL_OVERFLOW:
8733 ifn = IFN_SUB_OVERFLOW;
8734 break;
8735 case BUILT_IN_MUL_OVERFLOW_P:
8736 ovf_only = true;
8737 /* FALLTHRU */
8738 case BUILT_IN_MUL_OVERFLOW:
8739 opcode = MULT_EXPR;
8740 /* FALLTHRU */
8741 case BUILT_IN_SMUL_OVERFLOW:
8742 case BUILT_IN_SMULL_OVERFLOW:
8743 case BUILT_IN_SMULLL_OVERFLOW:
8744 case BUILT_IN_UMUL_OVERFLOW:
8745 case BUILT_IN_UMULL_OVERFLOW:
8746 case BUILT_IN_UMULLL_OVERFLOW:
8747 ifn = IFN_MUL_OVERFLOW;
8748 break;
8749 default:
8750 gcc_unreachable ();
8753 /* For the "generic" overloads, the first two arguments can have different
8754 types and the last argument determines the target type to use to check
8755 for overflow. The arguments of the other overloads all have the same
8756 type. */
8757 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8759 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8760 arguments are constant, attempt to fold the built-in call into a constant
8761 expression indicating whether or not it detected an overflow. */
8762 if (ovf_only
8763 && TREE_CODE (arg0) == INTEGER_CST
8764 && TREE_CODE (arg1) == INTEGER_CST)
8765 /* Perform the computation in the target type and check for overflow. */
8766 return omit_one_operand_loc (loc, boolean_type_node,
8767 arith_overflowed_p (opcode, type, arg0, arg1)
8768 ? boolean_true_node : boolean_false_node,
8769 arg2);
8771 tree ctype = build_complex_type (type);
8772 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8773 2, arg0, arg1);
8774 tree tgt = save_expr (call);
8775 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8776 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8777 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8779 if (ovf_only)
8780 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8782 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8783 tree store
8784 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8785 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
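/* Sketch of the non-_p result: __builtin_add_overflow (a, b, &r)
   becomes roughly

     tmp = SAVE_EXPR <IFN_ADD_OVERFLOW (a, b)>;
     r = REALPART_EXPR <tmp>, (_Bool) IMAGPART_EXPR <tmp>;

   where TMP is notation for the shared SAVE_EXPR; the _p variants
   keep only the IMAGPART_EXPR overflow test.  */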
8788 /* Fold a call to __builtin_FILE to a constant string. */
8790 static inline tree
8791 fold_builtin_FILE (location_t loc)
8793 if (const char *fname = LOCATION_FILE (loc))
8795 /* The documentation says this builtin is equivalent to the preprocessor
8796 __FILE__ macro so it appears appropriate to use the same file prefix
8797 mappings. */
8798 fname = remap_macro_filename (fname);
8799 return build_string_literal (strlen (fname) + 1, fname);
8802 return build_string_literal (1, "");
8805 /* Fold a call to __builtin_FUNCTION to a constant string. */
8807 static inline tree
8808 fold_builtin_FUNCTION ()
8810 const char *name = "";
8812 if (current_function_decl)
8813 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8815 return build_string_literal (strlen (name) + 1, name);
8818 /* Fold a call to __builtin_LINE to an integer constant. */
8820 static inline tree
8821 fold_builtin_LINE (location_t loc, tree type)
8823 return build_int_cst (type, LOCATION_LINE (loc));
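/* E.g. a __builtin_LINE () call located on line 42 of a file
   remapped to "foo.c" folds to the integer constant 42, while the
   two builtins above fold to the string "foo.c" and the enclosing
   function's printable name ("foo.c" and 42 being placeholders).  */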
8826 /* Fold a call to built-in function FNDECL with 0 arguments.
8827 This function returns NULL_TREE if no simplification was possible. */
8829 static tree
8830 fold_builtin_0 (location_t loc, tree fndecl)
8832 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8833 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8834 switch (fcode)
8836 case BUILT_IN_FILE:
8837 return fold_builtin_FILE (loc);
8839 case BUILT_IN_FUNCTION:
8840 return fold_builtin_FUNCTION ();
8842 case BUILT_IN_LINE:
8843 return fold_builtin_LINE (loc, type);
8845 CASE_FLT_FN (BUILT_IN_INF):
8846 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8847 case BUILT_IN_INFD32:
8848 case BUILT_IN_INFD64:
8849 case BUILT_IN_INFD128:
8850 return fold_builtin_inf (loc, type, true);
8852 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8853 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8854 return fold_builtin_inf (loc, type, false);
8856 case BUILT_IN_CLASSIFY_TYPE:
8857 return fold_builtin_classify_type (NULL_TREE);
8859 default:
8860 break;
8862 return NULL_TREE;
8865 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8866 This function returns NULL_TREE if no simplification was possible. */
8868 static tree
8869 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8871 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8872 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8874 if (TREE_CODE (arg0) == ERROR_MARK)
8875 return NULL_TREE;
8877 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8878 return ret;
8880 switch (fcode)
8882 case BUILT_IN_CONSTANT_P:
8884 tree val = fold_builtin_constant_p (arg0);
8886 /* Gimplification will pull the CALL_EXPR for the builtin out of
8887 an if condition. When not optimizing, we'll not CSE it back.
8888 To avoid link-error type regressions, return false now. */
8889 if (!val && !optimize)
8890 val = integer_zero_node;
8892 return val;
8895 case BUILT_IN_CLASSIFY_TYPE:
8896 return fold_builtin_classify_type (arg0);
8898 case BUILT_IN_STRLEN:
8899 return fold_builtin_strlen (loc, type, arg0);
8901 CASE_FLT_FN (BUILT_IN_FABS):
8902 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8903 case BUILT_IN_FABSD32:
8904 case BUILT_IN_FABSD64:
8905 case BUILT_IN_FABSD128:
8906 return fold_builtin_fabs (loc, arg0, type);
8908 case BUILT_IN_ABS:
8909 case BUILT_IN_LABS:
8910 case BUILT_IN_LLABS:
8911 case BUILT_IN_IMAXABS:
8912 return fold_builtin_abs (loc, arg0, type);
8914 CASE_FLT_FN (BUILT_IN_CONJ):
8915 if (validate_arg (arg0, COMPLEX_TYPE)
8916 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8917 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8918 break;
8920 CASE_FLT_FN (BUILT_IN_CREAL):
8921 if (validate_arg (arg0, COMPLEX_TYPE)
8922 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8923 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8924 break;
8926 CASE_FLT_FN (BUILT_IN_CIMAG):
8927 if (validate_arg (arg0, COMPLEX_TYPE)
8928 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8929 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8930 break;
8932 CASE_FLT_FN (BUILT_IN_CARG):
8933 return fold_builtin_carg (loc, arg0, type);
8935 case BUILT_IN_ISASCII:
8936 return fold_builtin_isascii (loc, arg0);
8938 case BUILT_IN_TOASCII:
8939 return fold_builtin_toascii (loc, arg0);
8941 case BUILT_IN_ISDIGIT:
8942 return fold_builtin_isdigit (loc, arg0);
8944 CASE_FLT_FN (BUILT_IN_FINITE):
8945 case BUILT_IN_FINITED32:
8946 case BUILT_IN_FINITED64:
8947 case BUILT_IN_FINITED128:
8948 case BUILT_IN_ISFINITE:
8950 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8951 if (ret)
8952 return ret;
8953 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8956 CASE_FLT_FN (BUILT_IN_ISINF):
8957 case BUILT_IN_ISINFD32:
8958 case BUILT_IN_ISINFD64:
8959 case BUILT_IN_ISINFD128:
8961 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8962 if (ret)
8963 return ret;
8964 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8967 case BUILT_IN_ISNORMAL:
8968 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8970 case BUILT_IN_ISINF_SIGN:
8971 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8973 CASE_FLT_FN (BUILT_IN_ISNAN):
8974 case BUILT_IN_ISNAND32:
8975 case BUILT_IN_ISNAND64:
8976 case BUILT_IN_ISNAND128:
8977 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8979 case BUILT_IN_FREE:
8980 if (integer_zerop (arg0))
8981 return build_empty_stmt (loc);
8982 break;
8984 default:
8985 break;
8988 return NULL_TREE;
8992 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8993 This function returns NULL_TREE if no simplification was possible. */
8995 static tree
8996 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8998 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8999 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9001 if (TREE_CODE (arg0) == ERROR_MARK
9002 || TREE_CODE (arg1) == ERROR_MARK)
9003 return NULL_TREE;
9005 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9006 return ret;
9008 switch (fcode)
9010 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9011 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9012 if (validate_arg (arg0, REAL_TYPE)
9013 && validate_arg (arg1, POINTER_TYPE))
9014 return do_mpfr_lgamma_r (arg0, arg1, type);
9015 break;
9017 CASE_FLT_FN (BUILT_IN_FREXP):
9018 return fold_builtin_frexp (loc, arg0, arg1, type);
9020 CASE_FLT_FN (BUILT_IN_MODF):
9021 return fold_builtin_modf (loc, arg0, arg1, type);
9023 case BUILT_IN_STRSPN:
9024 return fold_builtin_strspn (loc, arg0, arg1);
9026 case BUILT_IN_STRCSPN:
9027 return fold_builtin_strcspn (loc, arg0, arg1);
9029 case BUILT_IN_STRPBRK:
9030 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9032 case BUILT_IN_EXPECT:
9033 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9035 case BUILT_IN_ISGREATER:
9036 return fold_builtin_unordered_cmp (loc, fndecl,
9037 arg0, arg1, UNLE_EXPR, LE_EXPR);
9038 case BUILT_IN_ISGREATEREQUAL:
9039 return fold_builtin_unordered_cmp (loc, fndecl,
9040 arg0, arg1, UNLT_EXPR, LT_EXPR);
9041 case BUILT_IN_ISLESS:
9042 return fold_builtin_unordered_cmp (loc, fndecl,
9043 arg0, arg1, UNGE_EXPR, GE_EXPR);
9044 case BUILT_IN_ISLESSEQUAL:
9045 return fold_builtin_unordered_cmp (loc, fndecl,
9046 arg0, arg1, UNGT_EXPR, GT_EXPR);
9047 case BUILT_IN_ISLESSGREATER:
9048 return fold_builtin_unordered_cmp (loc, fndecl,
9049 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9050 case BUILT_IN_ISUNORDERED:
9051 return fold_builtin_unordered_cmp (loc, fndecl,
9052 arg0, arg1, UNORDERED_EXPR,
9053 NOP_EXPR);
9055 /* We do the folding for va_start in the expander. */
9056 case BUILT_IN_VA_START:
9057 break;
9059 case BUILT_IN_OBJECT_SIZE:
9060 return fold_builtin_object_size (arg0, arg1);
9062 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9063 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9065 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9066 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9068 default:
9069 break;
9071 return NULL_TREE;
9074 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9075 and ARG2.
9076 This function returns NULL_TREE if no simplification was possible. */
9078 static tree
9079 fold_builtin_3 (location_t loc, tree fndecl,
9080 tree arg0, tree arg1, tree arg2)
9082 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9085 if (TREE_CODE (arg0) == ERROR_MARK
9086 || TREE_CODE (arg1) == ERROR_MARK
9087 || TREE_CODE (arg2) == ERROR_MARK)
9088 return NULL_TREE;
9090 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9091 arg0, arg1, arg2))
9092 return ret;
9094 switch (fcode)
9097 CASE_FLT_FN (BUILT_IN_SINCOS):
9098 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9100 CASE_FLT_FN (BUILT_IN_REMQUO):
9101 if (validate_arg (arg0, REAL_TYPE)
9102 && validate_arg (arg1, REAL_TYPE)
9103 && validate_arg (arg2, POINTER_TYPE))
9104 return do_mpfr_remquo (arg0, arg1, arg2);
9105 break;
9107 case BUILT_IN_MEMCMP:
9108 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9110 case BUILT_IN_EXPECT:
9111 return fold_builtin_expect (loc, arg0, arg1, arg2);
9113 case BUILT_IN_ADD_OVERFLOW:
9114 case BUILT_IN_SUB_OVERFLOW:
9115 case BUILT_IN_MUL_OVERFLOW:
9116 case BUILT_IN_ADD_OVERFLOW_P:
9117 case BUILT_IN_SUB_OVERFLOW_P:
9118 case BUILT_IN_MUL_OVERFLOW_P:
9119 case BUILT_IN_SADD_OVERFLOW:
9120 case BUILT_IN_SADDL_OVERFLOW:
9121 case BUILT_IN_SADDLL_OVERFLOW:
9122 case BUILT_IN_SSUB_OVERFLOW:
9123 case BUILT_IN_SSUBL_OVERFLOW:
9124 case BUILT_IN_SSUBLL_OVERFLOW:
9125 case BUILT_IN_SMUL_OVERFLOW:
9126 case BUILT_IN_SMULL_OVERFLOW:
9127 case BUILT_IN_SMULLL_OVERFLOW:
9128 case BUILT_IN_UADD_OVERFLOW:
9129 case BUILT_IN_UADDL_OVERFLOW:
9130 case BUILT_IN_UADDLL_OVERFLOW:
9131 case BUILT_IN_USUB_OVERFLOW:
9132 case BUILT_IN_USUBL_OVERFLOW:
9133 case BUILT_IN_USUBLL_OVERFLOW:
9134 case BUILT_IN_UMUL_OVERFLOW:
9135 case BUILT_IN_UMULL_OVERFLOW:
9136 case BUILT_IN_UMULLL_OVERFLOW:
9137 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9139 default:
9140 break;
9142 return NULL_TREE;
9145 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9146 arguments. IGNORE is true if the result of the
9147 function call is ignored. This function returns NULL_TREE if no
9148 simplification was possible. */
9150 tree
9151 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9153 tree ret = NULL_TREE;
9155 switch (nargs)
9157 case 0:
9158 ret = fold_builtin_0 (loc, fndecl);
9159 break;
9160 case 1:
9161 ret = fold_builtin_1 (loc, fndecl, args[0]);
9162 break;
9163 case 2:
9164 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9165 break;
9166 case 3:
9167 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9168 break;
9169 default:
9170 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9171 break;
9173 if (ret)
9175 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9176 SET_EXPR_LOCATION (ret, loc);
9177 TREE_NO_WARNING (ret) = 1;
9178 return ret;
9180 return NULL_TREE;
9183 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9184 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9185 of arguments in ARGS to be omitted. OLDNARGS is the number of
9186 elements in ARGS. */
9188 static tree
9189 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9190 int skip, tree fndecl, int n, va_list newargs)
9192 int nargs = oldnargs - skip + n;
9193 tree *buffer;
9195 if (n > 0)
9197 int i, j;
9199 buffer = XALLOCAVEC (tree, nargs);
9200 for (i = 0; i < n; i++)
9201 buffer[i] = va_arg (newargs, tree);
9202 for (j = skip; j < oldnargs; j++, i++)
9203 buffer[i] = args[j];
9205 else
9206 buffer = args + skip;
9208 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9211 /* Return true if FNDECL shouldn't be folded right now.
9212 If a built-in function has an inline attribute always_inline
9213 wrapper, defer folding it after always_inline functions have
9214 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9215 might not be performed. */
9217 bool
9218 avoid_folding_inline_builtin (tree fndecl)
9220 return (DECL_DECLARED_INLINE_P (fndecl)
9221 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9222 && cfun
9223 && !cfun->always_inline_functions_inlined
9224 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
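/* The situation guarded against looks like the fortified wrappers in
   glibc; a minimal sketch (illustrative, not actual glibc code):

     extern inline __attribute__ ((always_inline, gnu_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n,
				      __builtin_object_size (d, 0));
     }

   Folding the memcpy call before this wrapper has been inlined would
   bypass the -D_FORTIFY_SOURCE object-size check.  */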
9227 /* A wrapper function for builtin folding that prevents warnings for
9228 "statement without effect" and the like, caused by removing the
9229 call node earlier than the warning is generated. */
9231 tree
9232 fold_call_expr (location_t loc, tree exp, bool ignore)
9234 tree ret = NULL_TREE;
9235 tree fndecl = get_callee_fndecl (exp);
9236 if (fndecl
9237 && TREE_CODE (fndecl) == FUNCTION_DECL
9238 && DECL_BUILT_IN (fndecl)
9239 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9240 yet. Defer folding until we see all the arguments
9241 (after inlining). */
9242 && !CALL_EXPR_VA_ARG_PACK (exp))
9244 int nargs = call_expr_nargs (exp);
9246 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9247 instead the last argument is __builtin_va_arg_pack (). Defer folding
9248 even in that case, until arguments are finalized. */
9249 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9251 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9252 if (fndecl2
9253 && TREE_CODE (fndecl2) == FUNCTION_DECL
9254 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9255 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9256 return NULL_TREE;
9259 if (avoid_folding_inline_builtin (fndecl))
9260 return NULL_TREE;
9262 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9263 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9264 CALL_EXPR_ARGP (exp), ignore);
9265 else
9267 tree *args = CALL_EXPR_ARGP (exp);
9268 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9269 if (ret)
9270 return ret;
9273 return NULL_TREE;
9276 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9277 N arguments are passed in the array ARGARRAY. Return a folded
9278 expression or NULL_TREE if no simplification was possible. */
9280 tree
9281 fold_builtin_call_array (location_t loc, tree,
9282 tree fn,
9283 int n,
9284 tree *argarray)
9286 if (TREE_CODE (fn) != ADDR_EXPR)
9287 return NULL_TREE;
9289 tree fndecl = TREE_OPERAND (fn, 0);
9290 if (TREE_CODE (fndecl) == FUNCTION_DECL
9291 && DECL_BUILT_IN (fndecl))
9293 /* If last argument is __builtin_va_arg_pack (), arguments to this
9294 function are not finalized yet. Defer folding until they are. */
9295 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9297 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9298 if (fndecl2
9299 && TREE_CODE (fndecl2) == FUNCTION_DECL
9300 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9301 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9302 return NULL_TREE;
9304 if (avoid_folding_inline_builtin (fndecl))
9305 return NULL_TREE;
9306 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9307 return targetm.fold_builtin (fndecl, n, argarray, false);
9308 else
9309 return fold_builtin_n (loc, fndecl, argarray, n, false);
9312 return NULL_TREE;
9315 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9316 along with N new arguments specified as the "..." parameters. SKIP
9317 is the number of arguments in EXP to be omitted. This function is used
9318 to do varargs-to-varargs transformations. */
9320 static tree
9321 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9323 va_list ap;
9324 tree t;
9326 va_start (ap, n);
9327 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9328 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9329 va_end (ap);
9331 return t;
9334 /* Validate a single argument ARG against a tree code CODE representing
9335 a type. Return true when argument is valid. */
9337 static bool
9338 validate_arg (const_tree arg, enum tree_code code)
9340 if (!arg)
9341 return false;
9342 else if (code == POINTER_TYPE)
9343 return POINTER_TYPE_P (TREE_TYPE (arg));
9344 else if (code == INTEGER_TYPE)
9345 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9346 return code == TREE_CODE (TREE_TYPE (arg));
9349 /* This function validates the types of a function call argument list
9350 against a specified list of tree_codes. If the last specifier is a 0,
9351 that represents an ellipsis, otherwise the last specifier must be a
9352 VOID_TYPE.
9354 This is the GIMPLE version of validate_arglist. Eventually we want to
9355 completely convert builtins.c to work from GIMPLEs and the tree based
9356 validate_arglist will then be removed. */
9358 bool
9359 validate_gimple_arglist (const gcall *call, ...)
9361 enum tree_code code;
9362 bool res = false;
9363 va_list ap;
9364 const_tree arg;
9365 size_t i;
9367 va_start (ap, call);
9368 i = 0;
9372 code = (enum tree_code) va_arg (ap, int);
9373 switch (code)
9375 case 0:
9376 /* This signifies an ellipsis; any further arguments are all ok. */
9377 res = true;
9378 goto end;
9379 case VOID_TYPE:
9380 /* This signifies an endlink, if no arguments remain, return
9381 true, otherwise return false. */
9382 res = (i == gimple_call_num_args (call));
9383 goto end;
9384 default:
9385 /* If no parameters remain or the parameter's code does not
9386 match the specified code, return false. Otherwise continue
9387 checking any remaining arguments. */
9388 arg = gimple_call_arg (call, i++);
9389 if (!validate_arg (arg, code))
9390 goto end;
9391 break;
9394 while (1);
9396 /* We need gotos here since we can only have one VA_CLOSE in a
9397 function. */
9398 end: ;
9399 va_end (ap);
9401 return res;
9404 /* Default target-specific builtin expander that does nothing. */
9406 rtx
9407 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9408 rtx target ATTRIBUTE_UNUSED,
9409 rtx subtarget ATTRIBUTE_UNUSED,
9410 machine_mode mode ATTRIBUTE_UNUSED,
9411 int ignore ATTRIBUTE_UNUSED)
9413 return NULL_RTX;
9416 /* Returns true if EXP represents data that would potentially reside
9417 in a readonly section. */
9419 bool
9420 readonly_data_expr (tree exp)
9422 STRIP_NOPS (exp);
9424 if (TREE_CODE (exp) != ADDR_EXPR)
9425 return false;
9427 exp = get_base_address (TREE_OPERAND (exp, 0));
9428 if (!exp)
9429 return false;
9431 /* Make sure we call decl_readonly_section only for trees it
9432 can handle (since it returns true for everything it doesn't
9433 understand). */
9434 if (TREE_CODE (exp) == STRING_CST
9435 || TREE_CODE (exp) == CONSTRUCTOR
9436 || (VAR_P (exp) && TREE_STATIC (exp)))
9437 return decl_readonly_section (exp, 0);
9438 else
9439 return false;
9442 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9443 to the call, and TYPE is its return type.
9445 Return NULL_TREE if no simplification was possible, otherwise return the
9446 simplified form of the call as a tree.
9448 The simplified form may be a constant or other expression which
9449 computes the same value, but in a more efficient manner (including
9450 calls to other builtin functions).
9452 The call may contain arguments which need to be evaluated, but
9453 which are not useful to determine the result of the call. In
9454 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9455 COMPOUND_EXPR will be an argument which must be evaluated.
9456 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9457 COMPOUND_EXPR in the chain will contain the tree for the simplified
9458 form of the builtin function call. */
9460 static tree
9461 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9463 if (!validate_arg (s1, POINTER_TYPE)
9464 || !validate_arg (s2, POINTER_TYPE))
9465 return NULL_TREE;
9466 else
9468 tree fn;
9469 const char *p1, *p2;
9471 p2 = c_getstr (s2);
9472 if (p2 == NULL)
9473 return NULL_TREE;
9475 p1 = c_getstr (s1);
9476 if (p1 != NULL)
9478 const char *r = strpbrk (p1, p2);
9479 tree tem;
9481 if (r == NULL)
9482 return build_int_cst (TREE_TYPE (s1), 0);
9484 /* Return an offset into the constant string argument. */
9485 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9486 return fold_convert_loc (loc, type, tem);
9489 if (p2[0] == '\0')
9490 /* strpbrk(x, "") == NULL.
9491 Evaluate and ignore s1 in case it had side-effects. */
9492 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9494 if (p2[1] != '\0')
9495 return NULL_TREE; /* Really call strpbrk. */
9497 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9498 if (!fn)
9499 return NULL_TREE;
9501 /* New argument list transforming strpbrk(s1, s2) to
9502 strchr(s1, s2[0]). */
9503 return build_call_expr_loc (loc, fn, 2, s1,
9504 build_int_cst (integer_type_node, p2[0]));
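/* Examples of the strpbrk transformations above (illustrative):

     strpbrk (s, "")     -> (char *) 0, with s still evaluated
     strpbrk ("ab", "b") -> "ab" + 1, an offset into the literal
     strpbrk (s, "/")    -> strchr (s, '/')  */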
9508 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9509 to the call.
9511 Return NULL_TREE if no simplification was possible, otherwise return the
9512 simplified form of the call as a tree.
9514 The simplified form may be a constant or other expression which
9515 computes the same value, but in a more efficient manner (including
9516 calls to other builtin functions).
9518 The call may contain arguments which need to be evaluated, but
9519 which are not useful to determine the result of the call. In
9520 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9521 COMPOUND_EXPR will be an argument which must be evaluated.
9522 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9523 COMPOUND_EXPR in the chain will contain the tree for the simplified
9524 form of the builtin function call. */
9526 static tree
9527 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9529 if (!validate_arg (s1, POINTER_TYPE)
9530 || !validate_arg (s2, POINTER_TYPE))
9531 return NULL_TREE;
9532 else
9534 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9536 /* If either argument is "", the call folds to zero. */
9537 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9538 /* Evaluate and ignore both arguments in case either one has
9539 side-effects. */
9540 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9541 s1, s2);
9542 return NULL_TREE;
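/* E.g. strspn (s, "") and strspn ("", s) both fold to (size_t) 0 here,
   with the arguments retained only for their side effects.  */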
9546 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9547 to the call.
9549 Return NULL_TREE if no simplification was possible, otherwise return the
9550 simplified form of the call as a tree.
9552 The simplified form may be a constant or other expression which
9553 computes the same value, but in a more efficient manner (including
9554 calls to other builtin functions).
9556 The call may contain arguments which need to be evaluated, but
9557 which are not useful to determine the result of the call. In
9558 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9559 COMPOUND_EXPR will be an argument which must be evaluated.
9560 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9561 COMPOUND_EXPR in the chain will contain the tree for the simplified
9562 form of the builtin function call. */
9564 static tree
9565 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9567 if (!validate_arg (s1, POINTER_TYPE)
9568 || !validate_arg (s2, POINTER_TYPE))
9569 return NULL_TREE;
9570 else
9572 /* If the first argument is "", the call folds to zero. */
9573 const char *p1 = c_getstr (s1);
9574 if (p1 && *p1 == '\0')
9576 /* Evaluate and ignore argument s2 in case it has
9577 side-effects. */
9578 return omit_one_operand_loc (loc, size_type_node,
9579 size_zero_node, s2);
9582 /* If the second argument is "", return __builtin_strlen(s1). */
9583 const char *p2 = c_getstr (s2);
9584 if (p2 && *p2 == '\0')
9586 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9588 /* If the replacement _DECL isn't initialized, don't do the
9589 transformation. */
9590 if (!fn)
9591 return NULL_TREE;
9593 return build_call_expr_loc (loc, fn, 1, s1);
9595 return NULL_TREE;
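/* E.g. strcspn ("", s) folds to (size_t) 0 and strcspn (s, "") becomes
   strlen (s), assuming the strlen decl is available.  */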
9599 /* Fold the next_arg or va_start call EXP. Returns true if an error
9600 was produced, false otherwise. This is done so that we don't output
9601 the error or warning two or three times. */
9603 bool
9604 fold_builtin_next_arg (tree exp, bool va_start_p)
9606 tree fntype = TREE_TYPE (current_function_decl);
9607 int nargs = call_expr_nargs (exp);
9608 tree arg;
9609 /* There is a good chance the current input_location points inside the
9610 definition of the va_start macro (perhaps on the token for
9611 builtin) in a system header, so warnings will not be emitted.
9612 Use the location in real source code. */
9613 source_location current_location =
9614 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9615 NULL);
9617 if (!stdarg_p (fntype))
9619 error ("%<va_start%> used in function with fixed args");
9620 return true;
9623 if (va_start_p)
9625 if (va_start_p && (nargs != 2))
9627 error ("wrong number of arguments to function %<va_start%>");
9628 return true;
9630 arg = CALL_EXPR_ARG (exp, 1);
9632 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9633 once we have checked the arguments and, if needed, issued a warning. */
9634 else
9636 if (nargs == 0)
9638 /* Evidently an out-of-date version of <stdarg.h>; can't validate
9639 va_start's second argument, but can still work as intended. */
9640 warning_at (current_location,
9641 OPT_Wvarargs,
9642 "%<__builtin_next_arg%> called without an argument");
9643 return true;
9645 else if (nargs > 1)
9647 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9648 return true;
9650 arg = CALL_EXPR_ARG (exp, 0);
9653 if (TREE_CODE (arg) == SSA_NAME)
9654 arg = SSA_NAME_VAR (arg);
9656 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9657 or __builtin_next_arg (0) the first time we see it, after checking
9658 the arguments and if needed issuing a warning. */
9659 if (!integer_zerop (arg))
9661 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9663 /* Strip off all nops for the sake of the comparison. This
9664 is not quite the same as STRIP_NOPS. It does more.
9665 We must also strip off INDIRECT_EXPR for C++ reference
9666 parameters. */
9667 while (CONVERT_EXPR_P (arg)
9668 || TREE_CODE (arg) == INDIRECT_REF)
9669 arg = TREE_OPERAND (arg, 0);
9670 if (arg != last_parm)
9672 /* FIXME: Sometimes with the tree optimizers we can end up with
9673 something other than the last argument even though the user used
9674 the last argument. We just warn and set the arg to be the last
9675 argument so that we will get wrong-code because of it. */
9677 warning_at (current_location,
9678 OPT_Wvarargs,
9679 "second parameter of %<va_start%> not last named argument");
9682 /* Undefined by C99 7.15.1.4p4 (va_start):
9683 "If the parameter parmN is declared with the register storage
9684 class, with a function or array type, or with a type that is
9685 not compatible with the type that results after application of
9686 the default argument promotions, the behavior is undefined."
9688 else if (DECL_REGISTER (arg))
9690 warning_at (current_location,
9691 OPT_Wvarargs,
9692 "undefined behavior when second parameter of "
9693 "%<va_start%> is declared with %<register%> storage");
9696 /* We want to verify the second parameter just once before the tree
9697 optimizers are run and then avoid keeping it in the tree,
9698 as otherwise we could warn even for correct code like:
9699 void foo (int i, ...)
9700 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9701 if (va_start_p)
9702 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9703 else
9704 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9706 return false;
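/* Illustrative examples of calls diagnosed above:

     void f (int a, int b, ...)
     {
       __builtin_va_list ap;
       __builtin_va_start (ap, a);   // warning: second parameter of
				     // va_start not last named argument
     }

     void g (int a)
     {
       __builtin_va_list ap;
       __builtin_va_start (ap, a);   // error: va_start used in function
				     // with fixed args
     }  */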
9710 /* Expand a call EXP to __builtin_object_size. */
9712 static rtx
9713 expand_builtin_object_size (tree exp)
9715 tree ost;
9716 int object_size_type;
9717 tree fndecl = get_callee_fndecl (exp);
9719 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9721 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9722 exp, fndecl);
9723 expand_builtin_trap ();
9724 return const0_rtx;
9727 ost = CALL_EXPR_ARG (exp, 1);
9728 STRIP_NOPS (ost);
9730 if (TREE_CODE (ost) != INTEGER_CST
9731 || tree_int_cst_sgn (ost) < 0
9732 || compare_tree_int (ost, 3) > 0)
9734 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9735 exp, fndecl);
9736 expand_builtin_trap ();
9737 return const0_rtx;
9740 object_size_type = tree_to_shwi (ost);
9742 return object_size_type < 2 ? constm1_rtx : const0_rtx;
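/* E.g. (illustrative) when the object size could not be computed
   earlier, the fallback above yields the documented defaults:

     __builtin_object_size (p, 0)  -> (size_t) -1   // types 0 and 1
     __builtin_object_size (p, 2)  -> (size_t) 0    // types 2 and 3  */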
9745 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9746 FCODE is the BUILT_IN_* to use.
9747 Return NULL_RTX if we failed; the caller should emit a normal call,
9748 otherwise try to get the result in TARGET, if convenient (and in
9749 mode MODE if that's convenient). */
9751 static rtx
9752 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9753 enum built_in_function fcode)
9755 if (!validate_arglist (exp,
9756 POINTER_TYPE,
9757 fcode == BUILT_IN_MEMSET_CHK
9758 ? INTEGER_TYPE : POINTER_TYPE,
9759 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9760 return NULL_RTX;
9762 tree dest = CALL_EXPR_ARG (exp, 0);
9763 tree src = CALL_EXPR_ARG (exp, 1);
9764 tree len = CALL_EXPR_ARG (exp, 2);
9765 tree size = CALL_EXPR_ARG (exp, 3);
9767 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9768 /*str=*/NULL_TREE, size);
9770 if (!tree_fits_uhwi_p (size))
9771 return NULL_RTX;
9773 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9775 /* Avoid transforming the checking call to an ordinary one when
9776 an overflow has been detected or when the call couldn't be
9777 validated because the size is not constant. */
9778 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9779 return NULL_RTX;
9781 tree fn = NULL_TREE;
9782 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9783 mem{cpy,pcpy,move,set} is available. */
9784 switch (fcode)
9786 case BUILT_IN_MEMCPY_CHK:
9787 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9788 break;
9789 case BUILT_IN_MEMPCPY_CHK:
9790 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9791 break;
9792 case BUILT_IN_MEMMOVE_CHK:
9793 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9794 break;
9795 case BUILT_IN_MEMSET_CHK:
9796 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9797 break;
9798 default:
9799 break;
9802 if (! fn)
9803 return NULL_RTX;
9805 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9806 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9807 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9808 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9810 else if (fcode == BUILT_IN_MEMSET_CHK)
9811 return NULL_RTX;
9812 else
9814 unsigned int dest_align = get_pointer_alignment (dest);
9816 /* If DEST is not a pointer type, call the normal function. */
9817 if (dest_align == 0)
9818 return NULL_RTX;
9820 /* If SRC and DEST are the same (and not volatile), do nothing. */
9821 if (operand_equal_p (src, dest, 0))
9823 tree expr;
9825 if (fcode != BUILT_IN_MEMPCPY_CHK)
9827 /* Evaluate and ignore LEN in case it has side-effects. */
9828 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9829 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9832 expr = fold_build_pointer_plus (dest, len);
9833 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9836 /* __memmove_chk special case. */
9837 if (fcode == BUILT_IN_MEMMOVE_CHK)
9839 unsigned int src_align = get_pointer_alignment (src);
9841 if (src_align == 0)
9842 return NULL_RTX;
9844 /* If src is categorized for a readonly section we can use
9845 normal __memcpy_chk. */
9846 if (readonly_data_expr (src))
9848 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9849 if (!fn)
9850 return NULL_RTX;
9851 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9852 dest, src, len, size);
9853 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9854 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9855 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9858 return NULL_RTX;
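/* E.g. (illustrative) with a constant length that fits the destination,

     __builtin___memcpy_chk (d, s, 3, 16)  ->  memcpy (d, s, 3)

   whereas a detected overflow, or a size that is not constant, leaves
   the checking call (and its runtime check) in place.  */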
9862 /* Emit warning if a buffer overflow is detected at compile time. */
9864 static void
9865 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9867 /* The source string. */
9868 tree srcstr = NULL_TREE;
9869 /* The size of the destination object. */
9870 tree objsize = NULL_TREE;
9871 /* The string that is being concatenated with (as in __strcat_chk)
9872 or null if it isn't. */
9873 tree catstr = NULL_TREE;
9874 /* The maximum length of the source sequence in a bounded operation
9875 (such as __strncat_chk) or null if the operation isn't bounded
9876 (such as __strcat_chk). */
9877 tree maxread = NULL_TREE;
9878 /* The exact size of the access (such as in __strncpy_chk). */
9879 tree size = NULL_TREE;
9881 switch (fcode)
9883 case BUILT_IN_STRCPY_CHK:
9884 case BUILT_IN_STPCPY_CHK:
9885 srcstr = CALL_EXPR_ARG (exp, 1);
9886 objsize = CALL_EXPR_ARG (exp, 2);
9887 break;
9889 case BUILT_IN_STRCAT_CHK:
9890 /* For __strcat_chk the warning will be emitted only if overflowing
9891 by at least strlen (dest) + 1 bytes. */
9892 catstr = CALL_EXPR_ARG (exp, 0);
9893 srcstr = CALL_EXPR_ARG (exp, 1);
9894 objsize = CALL_EXPR_ARG (exp, 2);
9895 break;
9897 case BUILT_IN_STRNCAT_CHK:
9898 catstr = CALL_EXPR_ARG (exp, 0);
9899 srcstr = CALL_EXPR_ARG (exp, 1);
9900 maxread = CALL_EXPR_ARG (exp, 2);
9901 objsize = CALL_EXPR_ARG (exp, 3);
9902 break;
9904 case BUILT_IN_STRNCPY_CHK:
9905 case BUILT_IN_STPNCPY_CHK:
9906 srcstr = CALL_EXPR_ARG (exp, 1);
9907 size = CALL_EXPR_ARG (exp, 2);
9908 objsize = CALL_EXPR_ARG (exp, 3);
9909 break;
9911 case BUILT_IN_SNPRINTF_CHK:
9912 case BUILT_IN_VSNPRINTF_CHK:
9913 maxread = CALL_EXPR_ARG (exp, 1);
9914 objsize = CALL_EXPR_ARG (exp, 3);
9915 break;
9916 default:
9917 gcc_unreachable ();
9920 if (catstr && maxread)
9922 /* Check __strncat_chk. There is no way to determine the length
9923 of the string to which the source string is being appended so
9924 just warn when the length of the source string is not known. */
9925 check_strncat_sizes (exp, objsize);
9926 return;
9929 /* The destination argument is the first one for all built-ins above. */
9930 tree dst = CALL_EXPR_ARG (exp, 0);
9932 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
9935 /* Emit warning if a buffer overflow is detected at compile time
9936 in __sprintf_chk/__vsprintf_chk calls. */
9938 static void
9939 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9941 tree size, len, fmt;
9942 const char *fmt_str;
9943 int nargs = call_expr_nargs (exp);
9945 /* Verify the required arguments in the original call. */
9947 if (nargs < 4)
9948 return;
9949 size = CALL_EXPR_ARG (exp, 2);
9950 fmt = CALL_EXPR_ARG (exp, 3);
9952 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9953 return;
9955 /* Check whether the format is a literal string constant. */
9956 fmt_str = c_getstr (fmt);
9957 if (fmt_str == NULL)
9958 return;
9960 if (!init_target_chars ())
9961 return;
9963 /* If the format doesn't contain % args or %%, we know its size. */
9964 if (strchr (fmt_str, target_percent) == 0)
9965 len = build_int_cstu (size_type_node, strlen (fmt_str));
9966 /* If the format is "%s" and first ... argument is a string literal,
9967 we know it too. */
9968 else if (fcode == BUILT_IN_SPRINTF_CHK
9969 && strcmp (fmt_str, target_percent_s) == 0)
9971 tree arg;
9973 if (nargs < 5)
9974 return;
9975 arg = CALL_EXPR_ARG (exp, 4);
9976 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9977 return;
9979 len = c_strlen (arg, 1);
9980 if (!len || ! tree_fits_uhwi_p (len))
9981 return;
9983 else
9984 return;
9986 /* Add one for the terminating nul. */
9987 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9989 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
9990 /*maxread=*/NULL_TREE, len, size);
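/* E.g. (illustrative) with a known 4-byte destination object,

     __builtin___sprintf_chk (buf, 0, 4, "hello")

   has a known result length of 6 bytes including the terminating nul,
   so a -Wstringop-overflow warning is emitted at compile time.  */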
9993 /* Emit a warning if free is called with the address of a variable. */
9995 static void
9996 maybe_emit_free_warning (tree exp)
9998 tree arg = CALL_EXPR_ARG (exp, 0);
10000 STRIP_NOPS (arg);
10001 if (TREE_CODE (arg) != ADDR_EXPR)
10002 return;
10004 arg = get_base_address (TREE_OPERAND (arg, 0));
10005 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10006 return;
10008 if (SSA_VAR_P (arg))
10009 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10010 "%Kattempt to free a non-heap object %qD", exp, arg);
10011 else
10012 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10013 "%Kattempt to free a non-heap object", exp);
10016 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10017 if possible. */
10019 static tree
10020 fold_builtin_object_size (tree ptr, tree ost)
10022 unsigned HOST_WIDE_INT bytes;
10023 int object_size_type;
10025 if (!validate_arg (ptr, POINTER_TYPE)
10026 || !validate_arg (ost, INTEGER_TYPE))
10027 return NULL_TREE;
10029 STRIP_NOPS (ost);
10031 if (TREE_CODE (ost) != INTEGER_CST
10032 || tree_int_cst_sgn (ost) < 0
10033 || compare_tree_int (ost, 3) > 0)
10034 return NULL_TREE;
10036 object_size_type = tree_to_shwi (ost);
10038 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10039 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10040 and (size_t) 0 for types 2 and 3. */
10041 if (TREE_SIDE_EFFECTS (ptr))
10042 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10044 if (TREE_CODE (ptr) == ADDR_EXPR)
10046 compute_builtin_object_size (ptr, object_size_type, &bytes);
10047 if (wi::fits_to_tree_p (bytes, size_type_node))
10048 return build_int_cstu (size_type_node, bytes);
10050 else if (TREE_CODE (ptr) == SSA_NAME)
10052 /* If object size is not known yet, delay folding until
10053 later. Maybe subsequent passes will help determining
10054 it. */
10055 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10056 && wi::fits_to_tree_p (bytes, size_type_node))
10057 return build_int_cstu (size_type_node, bytes);
10060 return NULL_TREE;
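/* E.g. (illustrative):

     char buf[8];
     __builtin_object_size (buf, 0)   // folds to (size_t) 8

   while a pointer whose target is not yet known is left unfolded here
   so that later passes (or the expander above) can supply the
   -1 / 0 defaults.  */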
10063 /* Builtins with folding operations that operate on "..." arguments
10064 need special handling; we need to store the arguments in a convenient
10065 data structure before attempting any folding. Fortunately there are
10066 only a few builtins that fall into this category. FNDECL is the
10067 function, EXP is the CALL_EXPR for the call. */
10069 static tree
10070 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10072 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10073 tree ret = NULL_TREE;
10075 switch (fcode)
10077 case BUILT_IN_FPCLASSIFY:
10078 ret = fold_builtin_fpclassify (loc, args, nargs);
10079 break;
10081 default:
10082 break;
10084 if (ret)
10086 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10087 SET_EXPR_LOCATION (ret, loc);
10088 TREE_NO_WARNING (ret) = 1;
10089 return ret;
10091 return NULL_TREE;
10094 /* Initialize format string characters in the target charset. */
10096 bool
10097 init_target_chars (void)
10099 static bool init;
10100 if (!init)
10102 target_newline = lang_hooks.to_target_charset ('\n');
10103 target_percent = lang_hooks.to_target_charset ('%');
10104 target_c = lang_hooks.to_target_charset ('c');
10105 target_s = lang_hooks.to_target_charset ('s');
10106 if (target_newline == 0 || target_percent == 0 || target_c == 0
10107 || target_s == 0)
10108 return false;
10110 target_percent_c[0] = target_percent;
10111 target_percent_c[1] = target_c;
10112 target_percent_c[2] = '\0';
10114 target_percent_s[0] = target_percent;
10115 target_percent_s[1] = target_s;
10116 target_percent_s[2] = '\0';
10118 target_percent_s_newline[0] = target_percent;
10119 target_percent_s_newline[1] = target_s;
10120 target_percent_s_newline[2] = target_newline;
10121 target_percent_s_newline[3] = '\0';
10123 init = true;
10125 return true;
10128 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10129 and no overflow/underflow occurred. INEXACT is true if M was not
10130 exactly calculated. TYPE is the tree type for the result. This
10131 function assumes that you cleared the MPFR flags and then
10132 calculated M to see if anything subsequently set a flag prior to
10133 entering this function. Return NULL_TREE if any checks fail. */
10135 static tree
10136 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10138 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10139 overflow/underflow occurred. If -frounding-math, proceed iff the
10140 result of calling FUNC was exact. */
10141 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10142 && (!flag_rounding_math || !inexact))
10144 REAL_VALUE_TYPE rr;
10146 real_from_mpfr (&rr, m, type, GMP_RNDN);
10147 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10148 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10149 but the mpfr_t is not, then we underflowed in the
10150 conversion. */
10151 if (real_isfinite (&rr)
10152 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10154 REAL_VALUE_TYPE rmode;
10156 real_convert (&rmode, TYPE_MODE (type), &rr);
10157 /* Proceed iff the specified mode can hold the value. */
10158 if (real_identical (&rmode, &rr))
10159 return build_real (type, rmode);
10162 return NULL_TREE;
10165 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10166 number and no overflow/underflow occurred. INEXACT is true if M
10167 was not exactly calculated. TYPE is the tree type for the result.
10168 This function assumes that you cleared the MPFR flags and then
10169 calculated M to see if anything subsequently set a flag prior to
10170 entering this function. Return NULL_TREE if any checks fail, if
10171 FORCE_CONVERT is true, then bypass the checks. */
10173 static tree
10174 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10176 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10177 overflow/underflow occurred. If -frounding-math, proceed iff the
10178 result of calling FUNC was exact. */
10179 if (force_convert
10180 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10181 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10182 && (!flag_rounding_math || !inexact)))
10184 REAL_VALUE_TYPE re, im;
10186 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10187 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10188 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10189 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10190 but the mpfr_t is not, then we underflowed in the
10191 conversion. */
10192 if (force_convert
10193 || (real_isfinite (&re) && real_isfinite (&im)
10194 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10195 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10197 REAL_VALUE_TYPE re_mode, im_mode;
10199 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10200 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10201 /* Proceed iff the specified mode can hold the value. */
10202 if (force_convert
10203 || (real_identical (&re_mode, &re)
10204 && real_identical (&im_mode, &im)))
10205 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10206 build_real (TREE_TYPE (type), im_mode));
10209 return NULL_TREE;
10212 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10213 the value pointed to by ARG_QUO and return the result. The type is taken
10214 from the type of ARG0 and is used for setting the precision of the
10215 calculation and results. */
10217 static tree
10218 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10220 tree const type = TREE_TYPE (arg0);
10221 tree result = NULL_TREE;
10223 STRIP_NOPS (arg0);
10224 STRIP_NOPS (arg1);
10226 /* To proceed, MPFR must exactly represent the target floating point
10227 format, which only happens when the target base equals two. */
10228 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10229 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10230 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10232 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10233 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10235 if (real_isfinite (ra0) && real_isfinite (ra1))
10237 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10238 const int prec = fmt->p;
10239 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10240 tree result_rem;
10241 long integer_quo;
10242 mpfr_t m0, m1;
10244 mpfr_inits2 (prec, m0, m1, NULL);
10245 mpfr_from_real (m0, ra0, GMP_RNDN);
10246 mpfr_from_real (m1, ra1, GMP_RNDN);
10247 mpfr_clear_flags ();
10248 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10249 /* Remquo is independent of the rounding mode, so pass
10250 inexact=0 to do_mpfr_ckconv(). */
10251 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10252 mpfr_clears (m0, m1, NULL);
10253 if (result_rem)
10255 /* MPFR calculates quo in the host's long so it may
10256 return more bits in quo than the target int can hold
10257 if sizeof(host long) > sizeof(target int). This can
10258 happen even for native compilers in LP64 mode. In
10259 these cases, modulo the quo value with the largest
10260 number that the target int can hold while leaving one
10261 bit for the sign. */
10262 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10263 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10265 /* Dereference the quo pointer argument. */
10266 arg_quo = build_fold_indirect_ref (arg_quo);
10267 /* Proceed iff a valid pointer type was passed in. */
10268 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10270 /* Set the value. */
10271 tree result_quo
10272 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10273 build_int_cst (TREE_TYPE (arg_quo),
10274 integer_quo));
10275 TREE_SIDE_EFFECTS (result_quo) = 1;
10276 /* Combine the quo assignment with the rem. */
10277 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10278 result_quo, result_rem));
10283 return result;
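/* E.g. (illustrative) a constant call such as

     int q;
     double r = remquo (10.0, 3.0, &q);   // r = 1.0, q = 3

   folds to a COMPOUND_EXPR that stores 3 into q and yields 1.0.  */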
10286 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10287 resulting value as a tree with type TYPE. The mpfr precision is
10288 set to the precision of TYPE. We assume that this mpfr function
10289 returns zero if the result could be calculated exactly within the
10290 requested precision. In addition, the integer pointer represented
10291 by ARG_SG will be dereferenced and set to the appropriate signgam
10292 (-1,1) value. */
10294 static tree
10295 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10297 tree result = NULL_TREE;
10299 STRIP_NOPS (arg);
10301 /* To proceed, MPFR must exactly represent the target floating point
10302 format, which only happens when the target base equals two. Also
10303 verify ARG is a constant and that ARG_SG is an int pointer. */
10304 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10305 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10306 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10307 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10309 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10311 /* In addition to NaN and Inf, the argument cannot be zero or a
10312 negative integer. */
10313 if (real_isfinite (ra)
10314 && ra->cl != rvc_zero
10315 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10317 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10318 const int prec = fmt->p;
10319 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10320 int inexact, sg;
10321 mpfr_t m;
10322 tree result_lg;
10324 mpfr_init2 (m, prec);
10325 mpfr_from_real (m, ra, GMP_RNDN);
10326 mpfr_clear_flags ();
10327 inexact = mpfr_lgamma (m, &sg, m, rnd);
10328 result_lg = do_mpfr_ckconv (m, type, inexact);
10329 mpfr_clear (m);
10330 if (result_lg)
10332 tree result_sg;
10334 /* Dereference the arg_sg pointer argument. */
10335 arg_sg = build_fold_indirect_ref (arg_sg);
10336 /* Assign the signgam value into *arg_sg. */
10337 result_sg = fold_build2 (MODIFY_EXPR,
10338 TREE_TYPE (arg_sg), arg_sg,
10339 build_int_cst (TREE_TYPE (arg_sg), sg));
10340 TREE_SIDE_EFFECTS (result_sg) = 1;
10341 /* Combine the signgam assignment with the lgamma result. */
10342 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10343 result_sg, result_lg));
10348 return result;
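/* E.g. (illustrative) lgamma_r (0.5, &sg): lgamma (0.5)
   = log (sqrt (pi)) ~= 0.57236 and Gamma (0.5) is positive, so the
   call folds to a COMPOUND_EXPR storing 1 into *sg and yielding that
   constant.  */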
10351 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10352 mpc function FUNC on it and return the resulting value as a tree
10353 with type TYPE. The mpfr precision is set to the precision of
10354 TYPE. We assume that function FUNC returns zero if the result
10355 could be calculated exactly within the requested precision. If
10356 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10357 in the arguments and/or results. */
10359 tree
10360 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10361 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10363 tree result = NULL_TREE;
10365 STRIP_NOPS (arg0);
10366 STRIP_NOPS (arg1);
10368 /* To proceed, MPFR must exactly represent the target floating point
10369 format, which only happens when the target base equals two. */
10370 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10372 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10373 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10374 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10376 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10377 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10378 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10379 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10381 if (do_nonfinite
10382 || (real_isfinite (re0) && real_isfinite (im0)
10383 && real_isfinite (re1) && real_isfinite (im1)))
10385 const struct real_format *const fmt =
10386 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10387 const int prec = fmt->p;
10388 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10389 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10390 int inexact;
10391 mpc_t m0, m1;
10393 mpc_init2 (m0, prec);
10394 mpc_init2 (m1, prec);
10395 mpfr_from_real (mpc_realref (m0), re0, rnd);
10396 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10397 mpfr_from_real (mpc_realref (m1), re1, rnd);
10398 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10399 mpfr_clear_flags ();
10400 inexact = func (m0, m0, m1, crnd);
10401 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10402 mpc_clear (m0);
10403 mpc_clear (m1);
10407 return result;
10410 /* A wrapper function for builtin folding that prevents warnings for
10411 "statement without effect" and the like, caused by removing the
10412 call node earlier than the warning is generated. */
10414 tree
10415 fold_call_stmt (gcall *stmt, bool ignore)
10417 tree ret = NULL_TREE;
10418 tree fndecl = gimple_call_fndecl (stmt);
10419 location_t loc = gimple_location (stmt);
10420 if (fndecl
10421 && TREE_CODE (fndecl) == FUNCTION_DECL
10422 && DECL_BUILT_IN (fndecl)
10423 && !gimple_call_va_arg_pack_p (stmt))
10425 int nargs = gimple_call_num_args (stmt);
10426 tree *args = (nargs > 0
10427 ? gimple_call_arg_ptr (stmt, 0)
10428 : &error_mark_node);
10430 if (avoid_folding_inline_builtin (fndecl))
10431 return NULL_TREE;
10432 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10434 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10436 else
10438 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10439 if (ret)
10441 /* Propagate location information from original call to
10442 expansion of builtin. Otherwise things like
10443 maybe_emit_chk_warning, that operate on the expansion
10444 of a builtin, will use the wrong location information. */
10445 if (gimple_has_location (stmt))
10447 tree realret = ret;
10448 if (TREE_CODE (ret) == NOP_EXPR)
10449 realret = TREE_OPERAND (ret, 0);
10450 if (CAN_HAVE_LOCATION_P (realret)
10451 && !EXPR_HAS_LOCATION (realret))
10452 SET_EXPR_LOCATION (realret, loc);
10453 return realret;
10455 return ret;
10459 return NULL_TREE;
10462 /* Look up the function in builtin_decl that corresponds to DECL
10463 and set ASMSPEC as its user assembler name. DECL must be a
10464 function decl that declares a builtin. */
10466 void
10467 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10469 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10470 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10471 && asmspec != 0);
10473 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10474 set_user_assembler_name (builtin, asmspec);
10476 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10477 && INT_TYPE_SIZE < BITS_PER_WORD)
10479 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10480 set_user_assembler_libfunc ("ffs", asmspec);
10481 set_optab_libfunc (ffs_optab, mode, "ffs");
10485 /* Return true if DECL is a builtin that expands to a constant or similarly
10486 simple code. */
10487 bool
10488 is_simple_builtin (tree decl)
10490 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10491 switch (DECL_FUNCTION_CODE (decl))
10493 /* Builtins that expand to constants. */
10494 case BUILT_IN_CONSTANT_P:
10495 case BUILT_IN_EXPECT:
10496 case BUILT_IN_OBJECT_SIZE:
10497 case BUILT_IN_UNREACHABLE:
10498 /* Simple register moves or loads from stack. */
10499 case BUILT_IN_ASSUME_ALIGNED:
10500 case BUILT_IN_RETURN_ADDRESS:
10501 case BUILT_IN_EXTRACT_RETURN_ADDR:
10502 case BUILT_IN_FROB_RETURN_ADDR:
10503 case BUILT_IN_RETURN:
10504 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10505 case BUILT_IN_FRAME_ADDRESS:
10506 case BUILT_IN_VA_END:
10507 case BUILT_IN_STACK_SAVE:
10508 case BUILT_IN_STACK_RESTORE:
10509 /* Exception state returns or moves registers around. */
10510 case BUILT_IN_EH_FILTER:
10511 case BUILT_IN_EH_POINTER:
10512 case BUILT_IN_EH_COPY_VALUES:
10513 return true;
10515 default:
10516 return false;
10519 return false;
10522 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
10523 most probably expanded inline into reasonably simple code. This is a
10524 superset of is_simple_builtin. */
10525 bool
10526 is_inexpensive_builtin (tree decl)
10528 if (!decl)
10529 return false;
10530 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10531 return true;
10532 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10533 switch (DECL_FUNCTION_CODE (decl))
10535 case BUILT_IN_ABS:
10536 CASE_BUILT_IN_ALLOCA:
10537 case BUILT_IN_BSWAP16:
10538 case BUILT_IN_BSWAP32:
10539 case BUILT_IN_BSWAP64:
10540 case BUILT_IN_CLZ:
10541 case BUILT_IN_CLZIMAX:
10542 case BUILT_IN_CLZL:
10543 case BUILT_IN_CLZLL:
10544 case BUILT_IN_CTZ:
10545 case BUILT_IN_CTZIMAX:
10546 case BUILT_IN_CTZL:
10547 case BUILT_IN_CTZLL:
10548 case BUILT_IN_FFS:
10549 case BUILT_IN_FFSIMAX:
10550 case BUILT_IN_FFSL:
10551 case BUILT_IN_FFSLL:
10552 case BUILT_IN_IMAXABS:
10553 case BUILT_IN_FINITE:
10554 case BUILT_IN_FINITEF:
10555 case BUILT_IN_FINITEL:
10556 case BUILT_IN_FINITED32:
10557 case BUILT_IN_FINITED64:
10558 case BUILT_IN_FINITED128:
10559 case BUILT_IN_FPCLASSIFY:
10560 case BUILT_IN_ISFINITE:
10561 case BUILT_IN_ISINF_SIGN:
10562 case BUILT_IN_ISINF:
10563 case BUILT_IN_ISINFF:
10564 case BUILT_IN_ISINFL:
10565 case BUILT_IN_ISINFD32:
10566 case BUILT_IN_ISINFD64:
10567 case BUILT_IN_ISINFD128:
10568 case BUILT_IN_ISNAN:
10569 case BUILT_IN_ISNANF:
10570 case BUILT_IN_ISNANL:
10571 case BUILT_IN_ISNAND32:
10572 case BUILT_IN_ISNAND64:
10573 case BUILT_IN_ISNAND128:
10574 case BUILT_IN_ISNORMAL:
10575 case BUILT_IN_ISGREATER:
10576 case BUILT_IN_ISGREATEREQUAL:
10577 case BUILT_IN_ISLESS:
10578 case BUILT_IN_ISLESSEQUAL:
10579 case BUILT_IN_ISLESSGREATER:
10580 case BUILT_IN_ISUNORDERED:
10581 case BUILT_IN_VA_ARG_PACK:
10582 case BUILT_IN_VA_ARG_PACK_LEN:
10583 case BUILT_IN_VA_COPY:
10584 case BUILT_IN_TRAP:
10585 case BUILT_IN_SAVEREGS:
10586 case BUILT_IN_POPCOUNTL:
10587 case BUILT_IN_POPCOUNTLL:
10588 case BUILT_IN_POPCOUNTIMAX:
10589 case BUILT_IN_POPCOUNT:
10590 case BUILT_IN_PARITYL:
10591 case BUILT_IN_PARITYLL:
10592 case BUILT_IN_PARITYIMAX:
10593 case BUILT_IN_PARITY:
10594 case BUILT_IN_LABS:
10595 case BUILT_IN_LLABS:
10596 case BUILT_IN_PREFETCH:
10597 case BUILT_IN_ACC_ON_DEVICE:
10598 return true;
10600 default:
10601 return is_simple_builtin (decl);
10604 return false;
10607 /* Return true if T is a constant and the value cast to a target char
10608 can be represented by a host char.
10609 Store the casted char constant in *P if so. */
10611 bool
10612 target_char_cst_p (tree t, char *p)
10614 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10615 return false;
10617 *p = (char)tree_to_uhwi (t);
10618 return true;
10621 /* Return the maximum object size. */
10623 tree
10624 max_object_size (void)
10626 /* To do: Make this a configurable parameter. */
10627 return TYPE_MAX_VALUE (ptrdiff_type_node);