[AArch64] Fix -mlow-precision-div (PR 86838)
gcc/builtins.c

/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
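
/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load") return true, whereas
   is_builtin_name ("memcpy") returns false.  */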

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
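
/* As a worked example of the contract above: if the address of EXP is
   known to be of the form 16*k + 4 in bytes, get_object_alignment_1
   stores 128 in *ALIGNP and 32 in *BITPOSP (both in bits), and
   get_object_alignment then returns least_bit_hwi (32) = 32, i.e. the
   4-byte alignment actually guaranteed for the object itself.  */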

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
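
/* For example, string_length ("ab\0cd", 1, 5) returns 2, and with
   2-byte elements string_length ("a\0b\0\0\0", 2, 3) also returns 2,
   since the third element is the first all-zero one.  */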
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  HOST_WIDE_INT maxelts = strelts;
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      {
	maxelts = tree_to_uhwi (size);
	maxelts = maxelts / eltsize - 1;
      }

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);
      if (len < strelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len * eltsize));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts * eltsize), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
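
/* For example, given the constant "foo\0bar" and a known byte offset of
   4, c_strlen returns ssize_int (3), the length of the trailing "bar".
   With a non-constant offset into the same string it returns NULL_TREE,
   because the embedded NUL makes the result depend on where the search
   starts.  */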
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
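
/* For example, on a little-endian target c_readstr ("abcd", SImode)
   yields the constant 0x64636261, with str[0] in the least significant
   byte.  Because CH latches at zero once a NUL is seen, any bytes past
   the end of the string read as zero.  */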
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and store that value in the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
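
/* The buffer laid out above therefore looks like this, in units of
   GET_MODE_SIZE (Pmode):

     word 0   frame pointer (targetm.builtin_setjmp_frame_value ())
     word 1   address of RECEIVER_LABEL
     word 2+  stack save area (sa_mode)

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below
   rely on these same offsets.  */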
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded within the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
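
/* A typical use, as in expand_builtin_nonlocal_goto below:

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   which accepts exactly two pointer arguments and nothing else.  */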
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
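
/* For example, __builtin_prefetch (p) is equivalent to
   __builtin_prefetch (p, 0, 3): a read prefetch with maximum temporal
   locality.  Both optional arguments must be compile-time constants.  */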
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
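
/* The block sized by apply_args_size thus starts with the incoming
   arg-pointer, optionally followed by the structure value address, and
   then each candidate argument register in turn, each rounded up to its
   mode's natural alignment.  expand_builtin_apply_args_1 and
   expand_builtin_apply below walk the block using this same layout.  */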
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1760 /* Perform an untyped return. */
1762 static void
1763 expand_builtin_return (rtx result)
1765 int size, align, regno;
1766 fixed_size_mode mode;
1767 rtx reg;
1768 rtx_insn *call_fusage = 0;
1770 result = convert_memory_address (Pmode, result);
1772 apply_result_size ();
1773 result = gen_rtx_MEM (BLKmode, result);
1775 if (targetm.have_untyped_return ())
1777 rtx vector = result_vector (0, result);
1778 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1779 emit_barrier ();
1780 return;
1783 /* Restore the return value and note that each value is used. */
1784 size = 0;
1785 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1786 if ((mode = apply_result_mode[regno]) != VOIDmode)
1788 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1789 if (size % align != 0)
1790 size = CEIL (size, align) * align;
1791 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1792 emit_move_insn (reg, adjust_address (result, mode, size));
1794 push_to_sequence (call_fusage);
1795 emit_use (reg);
1796 call_fusage = get_insns ();
1797 end_sequence ();
1798 size += GET_MODE_SIZE (mode);
1801 /* Put the USE insns before the return. */
1802 emit_insn (call_fusage);
1804 /* Return whatever values were restored by jumping directly to the end
1805 of the function. */
1806 expand_naked_return ();
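/* Editorial sketch (illustrative, not part of the original source):
   the two expanders above implement GCC's "constructing function
   calls" extension.  A hypothetical forwarding wrapper could look
   like:

     double wrapper (double x, int n)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (ret);
     }

   where target_fn is an assumed callee with the same signature and 64
   is a caller-chosen upper bound on the size of the argument block
   saved and copied above.  */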
1809 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1811 static enum type_class
1812 type_to_class (tree type)
1814 switch (TREE_CODE (type))
1816 case VOID_TYPE: return void_type_class;
1817 case INTEGER_TYPE: return integer_type_class;
1818 case ENUMERAL_TYPE: return enumeral_type_class;
1819 case BOOLEAN_TYPE: return boolean_type_class;
1820 case POINTER_TYPE: return pointer_type_class;
1821 case REFERENCE_TYPE: return reference_type_class;
1822 case OFFSET_TYPE: return offset_type_class;
1823 case REAL_TYPE: return real_type_class;
1824 case COMPLEX_TYPE: return complex_type_class;
1825 case FUNCTION_TYPE: return function_type_class;
1826 case METHOD_TYPE: return method_type_class;
1827 case RECORD_TYPE: return record_type_class;
1828 case UNION_TYPE:
1829 case QUAL_UNION_TYPE: return union_type_class;
1830 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1831 ? string_type_class : array_type_class);
1832 case LANG_TYPE: return lang_type_class;
1833 default: return no_type_class;
1837 /* Expand a call EXP to __builtin_classify_type. */
1839 static rtx
1840 expand_builtin_classify_type (tree exp)
1842 if (call_expr_nargs (exp))
1843 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1844 return GEN_INT (no_type_class);
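/* Editorial example (illustrative, not part of the original source):
   __builtin_classify_type folds to a constant from the type_class
   enumeration in typeclass.h, e.g.

     int a = __builtin_classify_type (3.14);        -> real_type_class
     int b = __builtin_classify_type ((char *) 0);  -> pointer_type_class

   and, as the expander above shows, a call with no argument yields
   no_type_class.  */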
1847 /* This helper macro, meant to be used in mathfn_built_in below, determines
1848 which among a set of builtin math functions is appropriate for a given type
1849 mode. The `F' (float) and `L' (long double) are automatically generated
1850 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1851 types, there are additional types that are considered with 'F32', 'F64',
1852 'F128', etc. suffixes. */
1853 #define CASE_MATHFN(MATHFN) \
1854 CASE_CFN_##MATHFN: \
1855 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1856 fcodel = BUILT_IN_##MATHFN##L ; break;
1857 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1858 types. */
1859 #define CASE_MATHFN_FLOATN(MATHFN) \
1860 CASE_CFN_##MATHFN: \
1861 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1862 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1863 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1864 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1865 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1866 break;
1867 /* Similar to above, but appends _R after any F/L suffix. */
1868 #define CASE_MATHFN_REENT(MATHFN) \
1869 case CFN_BUILT_IN_##MATHFN##_R: \
1870 case CFN_BUILT_IN_##MATHFN##F_R: \
1871 case CFN_BUILT_IN_##MATHFN##L_R: \
1872 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1873 fcodel = BUILT_IN_##MATHFN##L_R ; break;
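/* Editorial sketch (assumed expansion, for illustration only): with
   the generated CASE_CFN_* macros from case-cfn-macros.h,
   CASE_MATHFN (SQRT) expands to roughly

     case CFN_BUILT_IN_SQRT:
     case CFN_BUILT_IN_SQRTF:
     case CFN_BUILT_IN_SQRTL:
     case CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so a single CASE_MATHFN line covers the double, float and long
   double variants of a builtin as well as its combined function
   code.  */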
1875 /* Return a function equivalent to FN but operating on floating-point
1876 values of type TYPE, or END_BUILTINS if no such function exists.
1877 This is purely an operation on function codes; it does not guarantee
1878 that the target actually has an implementation of the function. */
1880 static built_in_function
1881 mathfn_built_in_2 (tree type, combined_fn fn)
1883 tree mtype;
1884 built_in_function fcode, fcodef, fcodel;
1885 built_in_function fcodef16 = END_BUILTINS;
1886 built_in_function fcodef32 = END_BUILTINS;
1887 built_in_function fcodef64 = END_BUILTINS;
1888 built_in_function fcodef128 = END_BUILTINS;
1889 built_in_function fcodef32x = END_BUILTINS;
1890 built_in_function fcodef64x = END_BUILTINS;
1891 built_in_function fcodef128x = END_BUILTINS;
1893 switch (fn)
1895 CASE_MATHFN (ACOS)
1896 CASE_MATHFN (ACOSH)
1897 CASE_MATHFN (ASIN)
1898 CASE_MATHFN (ASINH)
1899 CASE_MATHFN (ATAN)
1900 CASE_MATHFN (ATAN2)
1901 CASE_MATHFN (ATANH)
1902 CASE_MATHFN (CBRT)
1903 CASE_MATHFN_FLOATN (CEIL)
1904 CASE_MATHFN (CEXPI)
1905 CASE_MATHFN_FLOATN (COPYSIGN)
1906 CASE_MATHFN (COS)
1907 CASE_MATHFN (COSH)
1908 CASE_MATHFN (DREM)
1909 CASE_MATHFN (ERF)
1910 CASE_MATHFN (ERFC)
1911 CASE_MATHFN (EXP)
1912 CASE_MATHFN (EXP10)
1913 CASE_MATHFN (EXP2)
1914 CASE_MATHFN (EXPM1)
1915 CASE_MATHFN (FABS)
1916 CASE_MATHFN (FDIM)
1917 CASE_MATHFN_FLOATN (FLOOR)
1918 CASE_MATHFN_FLOATN (FMA)
1919 CASE_MATHFN_FLOATN (FMAX)
1920 CASE_MATHFN_FLOATN (FMIN)
1921 CASE_MATHFN (FMOD)
1922 CASE_MATHFN (FREXP)
1923 CASE_MATHFN (GAMMA)
1924 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1925 CASE_MATHFN (HUGE_VAL)
1926 CASE_MATHFN (HYPOT)
1927 CASE_MATHFN (ILOGB)
1928 CASE_MATHFN (ICEIL)
1929 CASE_MATHFN (IFLOOR)
1930 CASE_MATHFN (INF)
1931 CASE_MATHFN (IRINT)
1932 CASE_MATHFN (IROUND)
1933 CASE_MATHFN (ISINF)
1934 CASE_MATHFN (J0)
1935 CASE_MATHFN (J1)
1936 CASE_MATHFN (JN)
1937 CASE_MATHFN (LCEIL)
1938 CASE_MATHFN (LDEXP)
1939 CASE_MATHFN (LFLOOR)
1940 CASE_MATHFN (LGAMMA)
1941 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1942 CASE_MATHFN (LLCEIL)
1943 CASE_MATHFN (LLFLOOR)
1944 CASE_MATHFN (LLRINT)
1945 CASE_MATHFN (LLROUND)
1946 CASE_MATHFN (LOG)
1947 CASE_MATHFN (LOG10)
1948 CASE_MATHFN (LOG1P)
1949 CASE_MATHFN (LOG2)
1950 CASE_MATHFN (LOGB)
1951 CASE_MATHFN (LRINT)
1952 CASE_MATHFN (LROUND)
1953 CASE_MATHFN (MODF)
1954 CASE_MATHFN (NAN)
1955 CASE_MATHFN (NANS)
1956 CASE_MATHFN_FLOATN (NEARBYINT)
1957 CASE_MATHFN (NEXTAFTER)
1958 CASE_MATHFN (NEXTTOWARD)
1959 CASE_MATHFN (POW)
1960 CASE_MATHFN (POWI)
1961 CASE_MATHFN (POW10)
1962 CASE_MATHFN (REMAINDER)
1963 CASE_MATHFN (REMQUO)
1964 CASE_MATHFN_FLOATN (RINT)
1965 CASE_MATHFN_FLOATN (ROUND)
1966 CASE_MATHFN (SCALB)
1967 CASE_MATHFN (SCALBLN)
1968 CASE_MATHFN (SCALBN)
1969 CASE_MATHFN (SIGNBIT)
1970 CASE_MATHFN (SIGNIFICAND)
1971 CASE_MATHFN (SIN)
1972 CASE_MATHFN (SINCOS)
1973 CASE_MATHFN (SINH)
1974 CASE_MATHFN_FLOATN (SQRT)
1975 CASE_MATHFN (TAN)
1976 CASE_MATHFN (TANH)
1977 CASE_MATHFN (TGAMMA)
1978 CASE_MATHFN_FLOATN (TRUNC)
1979 CASE_MATHFN (Y0)
1980 CASE_MATHFN (Y1)
1981 CASE_MATHFN (YN)
1983 default:
1984 return END_BUILTINS;
1987 mtype = TYPE_MAIN_VARIANT (type);
1988 if (mtype == double_type_node)
1989 return fcode;
1990 else if (mtype == float_type_node)
1991 return fcodef;
1992 else if (mtype == long_double_type_node)
1993 return fcodel;
1994 else if (mtype == float16_type_node)
1995 return fcodef16;
1996 else if (mtype == float32_type_node)
1997 return fcodef32;
1998 else if (mtype == float64_type_node)
1999 return fcodef64;
2000 else if (mtype == float128_type_node)
2001 return fcodef128;
2002 else if (mtype == float32x_type_node)
2003 return fcodef32x;
2004 else if (mtype == float64x_type_node)
2005 return fcodef64x;
2006 else if (mtype == float128x_type_node)
2007 return fcodef128x;
2008 else
2009 return END_BUILTINS;
2012 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2013 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2014 otherwise use the explicit declaration. If we can't do the conversion,
2015 return null. */
2017 static tree
2018 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2020 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2021 if (fcode2 == END_BUILTINS)
2022 return NULL_TREE;
2024 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2025 return NULL_TREE;
2027 return builtin_decl_explicit (fcode2);
2030 /* Like mathfn_built_in_1, but always use the implicit array. */
2032 tree
2033 mathfn_built_in (tree type, combined_fn fn)
2035 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2038 /* Like mathfn_built_in_1, but take a built_in_function and
2039 always use the implicit array. */
2041 tree
2042 mathfn_built_in (tree type, enum built_in_function fn)
2044 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
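/* Editorial example (illustrative): mathfn_built_in maps a function
   code to the variant for a specific type, e.g.

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SIN);

   would yield the decl for sinf (assuming it is implicitly declared),
   while passing long_double_type_node would yield sinl.  */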
2047 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2048 return its code, otherwise return IFN_LAST. Note that this function
2049 only tests whether the function is defined in internals.def, not whether
2050 it is actually available on the target. */
2052 internal_fn
2053 associated_internal_fn (tree fndecl)
2055 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2056 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2057 switch (DECL_FUNCTION_CODE (fndecl))
2059 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2060 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2061 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2062 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2063 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2064 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2065 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2066 #include "internal-fn.def"
2068 CASE_FLT_FN (BUILT_IN_POW10):
2069 return IFN_EXP10;
2071 CASE_FLT_FN (BUILT_IN_DREM):
2072 return IFN_REMAINDER;
2074 CASE_FLT_FN (BUILT_IN_SCALBN):
2075 CASE_FLT_FN (BUILT_IN_SCALBLN):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2077 return IFN_LDEXP;
2078 return IFN_LAST;
2080 default:
2081 return IFN_LAST;
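/* Editorial example (illustrative): the mapping above is purely on
   function codes, e.g.

     internal_fn ifn
       = associated_internal_fn (builtin_decl_explicit (BUILT_IN_SQRTF));

   yields IFN_SQRT whether or not the target implements it, pow10
   yields IFN_EXP10, and scalbn/scalbln yield IFN_LDEXP only for
   radix-2 types; everything else yields IFN_LAST.  */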
2085 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2086 on the current target by a call to an internal function, return the
2087 code of that internal function, otherwise return IFN_LAST. The caller
2088 is responsible for ensuring that any side-effects of the built-in
2089 call are dealt with correctly. E.g. if CALL sets errno, the caller
2090 must decide that the errno result isn't needed or make it available
2091 in some other way. */
2093 internal_fn
2094 replacement_internal_fn (gcall *call)
2096 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2098 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2099 if (ifn != IFN_LAST)
2101 tree_pair types = direct_internal_fn_types (ifn, call);
2102 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2103 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2104 return ifn;
2107 return IFN_LAST;
2110 /* Expand a call to the builtin ternary math functions (fma).
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2117 static rtx
2118 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2120 optab builtin_optab;
2121 rtx op0, op1, op2, result;
2122 rtx_insn *insns;
2123 tree fndecl = get_callee_fndecl (exp);
2124 tree arg0, arg1, arg2;
2125 machine_mode mode;
2127 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2128 return NULL_RTX;
2130 arg0 = CALL_EXPR_ARG (exp, 0);
2131 arg1 = CALL_EXPR_ARG (exp, 1);
2132 arg2 = CALL_EXPR_ARG (exp, 2);
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_FMA):
2137 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2138 builtin_optab = fma_optab; break;
2139 default:
2140 gcc_unreachable ();
2143 /* Make a suitable register to place result in. */
2144 mode = TYPE_MODE (TREE_TYPE (exp));
2146 /* Before working hard, check whether the instruction is available. */
2147 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2148 return NULL_RTX;
2150 result = gen_reg_rtx (mode);
2152 /* Always stabilize the argument list. */
2153 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2154 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2155 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2157 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2158 op1 = expand_normal (arg1);
2159 op2 = expand_normal (arg2);
2161 start_sequence ();
2163 /* Compute into RESULT.
2164 Set RESULT to wherever the result comes back. */
2165 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2166 result, 0);
2168 /* If we were unable to expand via the builtin, stop the sequence
2169 (without outputting the insns) and call to the library function
2170 with the stabilized argument list. */
2171 if (result == 0)
2173 end_sequence ();
2174 return expand_call (exp, target, target == const0_rtx);
2177 /* Output the entire sequence. */
2178 insns = get_insns ();
2179 end_sequence ();
2180 emit_insn (insns);
2182 return result;
2185 /* Expand a call to the builtin sin and cos math functions.
2186 Return NULL_RTX if a normal call should be emitted rather than expanding the
2187 function in-line. EXP is the expression that is a call to the builtin
2188 function; if convenient, the result should be placed in TARGET.
2189 SUBTARGET may be used as the target for computing one of EXP's
2190 operands. */
2192 static rtx
2193 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2195 optab builtin_optab;
2196 rtx op0;
2197 rtx_insn *insns;
2198 tree fndecl = get_callee_fndecl (exp);
2199 machine_mode mode;
2200 tree arg;
2202 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2203 return NULL_RTX;
2205 arg = CALL_EXPR_ARG (exp, 0);
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 CASE_FLT_FN (BUILT_IN_COS):
2211 builtin_optab = sincos_optab; break;
2212 default:
2213 gcc_unreachable ();
2216 /* Make a suitable register to place result in. */
2217 mode = TYPE_MODE (TREE_TYPE (exp));
2219 /* Check if the sincos insn is available; otherwise fall back
2220 to sin or cos insn. */
2221 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_SIN):
2225 builtin_optab = sin_optab; break;
2226 CASE_FLT_FN (BUILT_IN_COS):
2227 builtin_optab = cos_optab; break;
2228 default:
2229 gcc_unreachable ();
2232 /* Before working hard, check whether the instruction is available. */
2233 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2235 rtx result = gen_reg_rtx (mode);
2237 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2238 need to expand the argument again. This way, we will not perform
2239 side-effects more than once. */
2240 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2242 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2244 start_sequence ();
2246 /* Compute into RESULT.
2247 Set RESULT to wherever the result comes back. */
2248 if (builtin_optab == sincos_optab)
2250 int ok;
2252 switch (DECL_FUNCTION_CODE (fndecl))
2254 CASE_FLT_FN (BUILT_IN_SIN):
2255 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2256 break;
2257 CASE_FLT_FN (BUILT_IN_COS):
2258 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2259 break;
2260 default:
2261 gcc_unreachable ();
2263 gcc_assert (ok);
2265 else
2266 result = expand_unop (mode, builtin_optab, op0, result, 0);
2268 if (result != 0)
2270 /* Output the entire sequence. */
2271 insns = get_insns ();
2272 end_sequence ();
2273 emit_insn (insns);
2274 return result;
2277 /* If we were unable to expand via the builtin, stop the sequence
2278 (without outputting the insns) and call to the library function
2279 with the stabilized argument list. */
2280 end_sequence ();
2283 return expand_call (exp, target, target == const0_rtx);
2286 /* Given an interclass math builtin decl FNDECL and its argument ARG
2287 return an RTL instruction code that implements the functionality.
2288 If that isn't possible or available return CODE_FOR_nothing. */
2290 static enum insn_code
2291 interclass_mathfn_icode (tree arg, tree fndecl)
2293 bool errno_set = false;
2294 optab builtin_optab = unknown_optab;
2295 machine_mode mode;
2297 switch (DECL_FUNCTION_CODE (fndecl))
2299 CASE_FLT_FN (BUILT_IN_ILOGB):
2300 errno_set = true; builtin_optab = ilogb_optab; break;
2301 CASE_FLT_FN (BUILT_IN_ISINF):
2302 builtin_optab = isinf_optab; break;
2303 case BUILT_IN_ISNORMAL:
2304 case BUILT_IN_ISFINITE:
2305 CASE_FLT_FN (BUILT_IN_FINITE):
2306 case BUILT_IN_FINITED32:
2307 case BUILT_IN_FINITED64:
2308 case BUILT_IN_FINITED128:
2309 case BUILT_IN_ISINFD32:
2310 case BUILT_IN_ISINFD64:
2311 case BUILT_IN_ISINFD128:
2312 /* These builtins have no optabs (yet). */
2313 break;
2314 default:
2315 gcc_unreachable ();
2318 /* There's no easy way to detect the case we need to set EDOM. */
2319 if (flag_errno_math && errno_set)
2320 return CODE_FOR_nothing;
2322 /* Optab mode depends on the mode of the input argument. */
2323 mode = TYPE_MODE (TREE_TYPE (arg));
2325 if (builtin_optab)
2326 return optab_handler (builtin_optab, mode);
2327 return CODE_FOR_nothing;
2330 /* Expand a call to one of the builtin math functions that operate on
2331 a floating-point argument and output an integer result (ilogb, isinf,
2332 isnan, etc).
2333 Return 0 if a normal call should be emitted rather than expanding the
2334 function in-line. EXP is the expression that is a call to the builtin
2335 function; if convenient, the result should be placed in TARGET. */
2337 static rtx
2338 expand_builtin_interclass_mathfn (tree exp, rtx target)
2340 enum insn_code icode = CODE_FOR_nothing;
2341 rtx op0;
2342 tree fndecl = get_callee_fndecl (exp);
2343 machine_mode mode;
2344 tree arg;
2346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2347 return NULL_RTX;
2349 arg = CALL_EXPR_ARG (exp, 0);
2350 icode = interclass_mathfn_icode (arg, fndecl);
2351 mode = TYPE_MODE (TREE_TYPE (arg));
2353 if (icode != CODE_FOR_nothing)
2355 struct expand_operand ops[1];
2356 rtx_insn *last = get_last_insn ();
2357 tree orig_arg = arg;
2359 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2360 need to expand the argument again. This way, we will not perform
2361 side-effects more than once. */
2362 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2364 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2366 if (mode != GET_MODE (op0))
2367 op0 = convert_to_mode (mode, op0, 0);
2369 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2370 if (maybe_legitimize_operands (icode, 0, 1, ops)
2371 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2372 return ops[0].value;
2374 delete_insns_since (last);
2375 CALL_EXPR_ARG (exp, 0) = orig_arg;
2378 return NULL_RTX;
2381 /* Expand a call to the builtin sincos math function.
2382 Return NULL_RTX if a normal call should be emitted rather than expanding the
2383 function in-line. EXP is the expression that is a call to the builtin
2384 function. */
2386 static rtx
2387 expand_builtin_sincos (tree exp)
2389 rtx op0, op1, op2, target1, target2;
2390 machine_mode mode;
2391 tree arg, sinp, cosp;
2392 int result;
2393 location_t loc = EXPR_LOCATION (exp);
2394 tree alias_type, alias_off;
2396 if (!validate_arglist (exp, REAL_TYPE,
2397 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2398 return NULL_RTX;
2400 arg = CALL_EXPR_ARG (exp, 0);
2401 sinp = CALL_EXPR_ARG (exp, 1);
2402 cosp = CALL_EXPR_ARG (exp, 2);
2404 /* Make a suitable register to place result in. */
2405 mode = TYPE_MODE (TREE_TYPE (arg));
2407 /* Check if sincos insn is available, otherwise emit the call. */
2408 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2409 return NULL_RTX;
2411 target1 = gen_reg_rtx (mode);
2412 target2 = gen_reg_rtx (mode);
2414 op0 = expand_normal (arg);
2415 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2416 alias_off = build_int_cst (alias_type, 0);
2417 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2418 sinp, alias_off));
2419 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2420 cosp, alias_off));
2422 /* Compute into target1 and target2.
2423 Set TARGET to wherever the result comes back. */
2424 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2425 gcc_assert (result);
2427 /* Move target1 and target2 to the memory locations indicated
2428 by op1 and op2. */
2429 emit_move_insn (op1, target1);
2430 emit_move_insn (op2, target2);
2432 return const0_rtx;
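/* Editorial sketch (illustrative): on a target implementing the
   sincos optab, a source-level call

     sincos (x, &s, &c);

   is expanded by the function above into one two-output insn followed
   by two stores, instead of a library call.  */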
2435 /* Expand a call to the internal cexpi builtin to the sincos math function.
2436 EXP is the expression that is a call to the builtin function; if convenient,
2437 the result should be placed in TARGET. */
2439 static rtx
2440 expand_builtin_cexpi (tree exp, rtx target)
2442 tree fndecl = get_callee_fndecl (exp);
2443 tree arg, type;
2444 machine_mode mode;
2445 rtx op0, op1, op2;
2446 location_t loc = EXPR_LOCATION (exp);
2448 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2449 return NULL_RTX;
2451 arg = CALL_EXPR_ARG (exp, 0);
2452 type = TREE_TYPE (arg);
2453 mode = TYPE_MODE (TREE_TYPE (arg));
2455 /* Try expanding via a sincos optab; fall back to emitting a libcall
2456 to sincos or cexp. We are sure to have one of them, because cexpi
2457 is only generated from sincos or cexp, or when either is available. */
2458 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2460 op1 = gen_reg_rtx (mode);
2461 op2 = gen_reg_rtx (mode);
2463 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2465 /* Compute into op1 and op2. */
2466 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2468 else if (targetm.libc_has_function (function_sincos))
2470 tree call, fn = NULL_TREE;
2471 tree top1, top2;
2472 rtx op1a, op2a;
2474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2475 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2477 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2479 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2480 else
2481 gcc_unreachable ();
2483 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2484 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2485 op1a = copy_addr_to_reg (XEXP (op1, 0));
2486 op2a = copy_addr_to_reg (XEXP (op2, 0));
2487 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2488 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2490 /* Make sure not to fold the sincos call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2493 call, 3, arg, top1, top2));
2495 else
2497 tree call, fn = NULL_TREE, narg;
2498 tree ctype = build_complex_type (type);
2500 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2501 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2502 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2503 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2504 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2505 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2506 else
2507 gcc_unreachable ();
2509 /* If we don't have a decl for cexp create one. This is the
2510 friendliest fallback if the user calls __builtin_cexpi
2511 without full target C99 function support. */
2512 if (fn == NULL_TREE)
2514 tree fntype;
2515 const char *name = NULL;
2517 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2518 name = "cexpf";
2519 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2520 name = "cexp";
2521 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2522 name = "cexpl";
2524 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2525 fn = build_fn_decl (name, fntype);
2528 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2529 build_real (type, dconst0), arg);
2531 /* Make sure not to fold the cexp call again. */
2532 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2533 return expand_expr (build_call_nary (ctype, call, 1, narg),
2534 target, VOIDmode, EXPAND_NORMAL);
2537 /* Now build the proper return type. */
2538 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2539 make_tree (TREE_TYPE (arg), op2),
2540 make_tree (TREE_TYPE (arg), op1)),
2541 target, VOIDmode, EXPAND_NORMAL);
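/* Editorial note (illustrative): __builtin_cexpi (x) computes
   cexp (I * x), i.e. cos (x) + I * sin (x).  The expansion above
   tries, in order: the sincos optab, a sincos libcall, and finally a
   cexp call on a complex argument with zero real part, roughly

     cexp (CMPLX (0.0, x))

   and then assembles the COMPLEX_EXPR result from the two parts.  */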
2544 /* Conveniently construct a function call expression. FNDECL names the
2545 function to be called, N is the number of arguments, and the "..."
2546 parameters are the argument expressions. Unlike build_call_expr
2547 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2549 static tree
2550 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2552 va_list ap;
2553 tree fntype = TREE_TYPE (fndecl);
2554 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2556 va_start (ap, n);
2557 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2558 va_end (ap);
2559 SET_EXPR_LOCATION (fn, loc);
2560 return fn;
2563 /* Expand a call to one of the builtin rounding functions gcc defines
2564 as an extension (lfloor and lceil). As these are gcc extensions we
2565 do not need to worry about setting errno to EDOM.
2566 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2567 EXP is the expression that is a call to the builtin function;
2568 if convenient, the result should be placed in TARGET. */
2570 static rtx
2571 expand_builtin_int_roundingfn (tree exp, rtx target)
2573 convert_optab builtin_optab;
2574 rtx op0, tmp;
2575 rtx_insn *insns;
2576 tree fndecl = get_callee_fndecl (exp);
2577 enum built_in_function fallback_fn;
2578 tree fallback_fndecl;
2579 machine_mode mode;
2580 tree arg;
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 gcc_unreachable ();
2585 arg = CALL_EXPR_ARG (exp, 0);
2587 switch (DECL_FUNCTION_CODE (fndecl))
2589 CASE_FLT_FN (BUILT_IN_ICEIL):
2590 CASE_FLT_FN (BUILT_IN_LCEIL):
2591 CASE_FLT_FN (BUILT_IN_LLCEIL):
2592 builtin_optab = lceil_optab;
2593 fallback_fn = BUILT_IN_CEIL;
2594 break;
2596 CASE_FLT_FN (BUILT_IN_IFLOOR):
2597 CASE_FLT_FN (BUILT_IN_LFLOOR):
2598 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2599 builtin_optab = lfloor_optab;
2600 fallback_fn = BUILT_IN_FLOOR;
2601 break;
2603 default:
2604 gcc_unreachable ();
2607 /* Make a suitable register to place result in. */
2608 mode = TYPE_MODE (TREE_TYPE (exp));
2610 target = gen_reg_rtx (mode);
2612 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2613 need to expand the argument again. This way, we will not perform
2614 side-effects more than once. */
2615 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2617 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2619 start_sequence ();
2621 /* Compute into TARGET. */
2622 if (expand_sfix_optab (target, op0, builtin_optab))
2624 /* Output the entire sequence. */
2625 insns = get_insns ();
2626 end_sequence ();
2627 emit_insn (insns);
2628 return target;
2631 /* If we were unable to expand via the builtin, stop the sequence
2632 (without outputting the insns). */
2633 end_sequence ();
2635 /* Fall back to floating point rounding optab. */
2636 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2638 /* For non-C99 targets we may end up without a fallback fndecl here
2639 if the user called __builtin_lfloor directly. In this case emit
2640 a call to the floor/ceil variants nevertheless. This should result
2641 in the best user experience for targets without full C99 support. */
2642 if (fallback_fndecl == NULL_TREE)
2644 tree fntype;
2645 const char *name = NULL;
2647 switch (DECL_FUNCTION_CODE (fndecl))
2649 case BUILT_IN_ICEIL:
2650 case BUILT_IN_LCEIL:
2651 case BUILT_IN_LLCEIL:
2652 name = "ceil";
2653 break;
2654 case BUILT_IN_ICEILF:
2655 case BUILT_IN_LCEILF:
2656 case BUILT_IN_LLCEILF:
2657 name = "ceilf";
2658 break;
2659 case BUILT_IN_ICEILL:
2660 case BUILT_IN_LCEILL:
2661 case BUILT_IN_LLCEILL:
2662 name = "ceill";
2663 break;
2664 case BUILT_IN_IFLOOR:
2665 case BUILT_IN_LFLOOR:
2666 case BUILT_IN_LLFLOOR:
2667 name = "floor";
2668 break;
2669 case BUILT_IN_IFLOORF:
2670 case BUILT_IN_LFLOORF:
2671 case BUILT_IN_LLFLOORF:
2672 name = "floorf";
2673 break;
2674 case BUILT_IN_IFLOORL:
2675 case BUILT_IN_LFLOORL:
2676 case BUILT_IN_LLFLOORL:
2677 name = "floorl";
2678 break;
2679 default:
2680 gcc_unreachable ();
2683 fntype = build_function_type_list (TREE_TYPE (arg),
2684 TREE_TYPE (arg), NULL_TREE);
2685 fallback_fndecl = build_fn_decl (name, fntype);
2688 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2690 tmp = expand_normal (exp);
2691 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2693 /* Truncate the result of the floating-point optab to integer
2694 via expand_fix (). */
2695 target = gen_reg_rtx (mode);
2696 expand_fix (target, tmp, 0);
2698 return target;
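/* Editorial sketch (illustrative): when the lceil/lfloor optab is
   unavailable, the fallback path above turns

     long l = __builtin_lfloor (x);

   into the equivalent of

     long l = (long) floor (x);

   using expand_fix for the final truncation, even on targets without
   a full C99 runtime.  */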
2701 /* Expand a call to one of the builtin math functions doing integer
2702 conversion (lrint).
2703 Return 0 if a normal call should be emitted rather than expanding the
2704 function in-line. EXP is the expression that is a call to the builtin
2705 function; if convenient, the result should be placed in TARGET. */
2707 static rtx
2708 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2710 convert_optab builtin_optab;
2711 rtx op0;
2712 rtx_insn *insns;
2713 tree fndecl = get_callee_fndecl (exp);
2714 tree arg;
2715 machine_mode mode;
2716 enum built_in_function fallback_fn = BUILT_IN_NONE;
2718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2719 gcc_unreachable ();
2721 arg = CALL_EXPR_ARG (exp, 0);
2723 switch (DECL_FUNCTION_CODE (fndecl))
2725 CASE_FLT_FN (BUILT_IN_IRINT):
2726 fallback_fn = BUILT_IN_LRINT;
2727 gcc_fallthrough ();
2728 CASE_FLT_FN (BUILT_IN_LRINT):
2729 CASE_FLT_FN (BUILT_IN_LLRINT):
2730 builtin_optab = lrint_optab;
2731 break;
2733 CASE_FLT_FN (BUILT_IN_IROUND):
2734 fallback_fn = BUILT_IN_LROUND;
2735 gcc_fallthrough ();
2736 CASE_FLT_FN (BUILT_IN_LROUND):
2737 CASE_FLT_FN (BUILT_IN_LLROUND):
2738 builtin_optab = lround_optab;
2739 break;
2741 default:
2742 gcc_unreachable ();
2745 /* There's no easy way to detect the case we need to set EDOM. */
2746 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2747 return NULL_RTX;
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2752 /* There's no easy way to detect the case we need to set EDOM. */
2753 if (!flag_errno_math)
2755 rtx result = gen_reg_rtx (mode);
2757 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2758 need to expand the argument again. This way, we will not perform
2759 side-effects more than once. */
2760 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2762 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2764 start_sequence ();
2766 if (expand_sfix_optab (result, op0, builtin_optab))
2768 /* Output the entire sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 return result;
2775 /* If we were unable to expand via the builtin, stop the sequence
2776 (without outputting the insns) and call to the library function
2777 with the stabilized argument list. */
2778 end_sequence ();
2781 if (fallback_fn != BUILT_IN_NONE)
2783 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2784 targets, (int) round (x) should never be transformed into
2785 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2786 a call to lround in the hope that the target provides at least some
2787 C99 functions. This should result in the best user experience for
2788 targets without full C99 support. */
2789 tree fallback_fndecl = mathfn_built_in_1
2790 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2792 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2793 fallback_fndecl, 1, arg);
2795 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2796 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2797 return convert_to_mode (mode, target, 0);
2800 return expand_call (exp, target, target == const0_rtx);
2803 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2804 a normal call should be emitted rather than expanding the function
2805 in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2808 static rtx
2809 expand_builtin_powi (tree exp, rtx target)
2811 tree arg0, arg1;
2812 rtx op0, op1;
2813 machine_mode mode;
2814 machine_mode mode2;
2816 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2817 return NULL_RTX;
2819 arg0 = CALL_EXPR_ARG (exp, 0);
2820 arg1 = CALL_EXPR_ARG (exp, 1);
2821 mode = TYPE_MODE (TREE_TYPE (exp));
2823 /* Emit a libcall to libgcc. */
2825 /* Mode of the 2nd argument must match that of an int. */
2826 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2828 if (target == NULL_RTX)
2829 target = gen_reg_rtx (mode);
2831 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2832 if (GET_MODE (op0) != mode)
2833 op0 = convert_to_mode (mode, op0, 0);
2834 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2835 if (GET_MODE (op1) != mode2)
2836 op1 = convert_to_mode (mode2, op1, 0);
2838 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2839 target, LCT_CONST, mode,
2840 op0, mode, op1, mode2);
2842 return target;
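/* Editorial example (illustrative): __builtin_powi always becomes a
   libgcc call here; e.g. for double,

     double y = __builtin_powi (x, n);

   emits a call to __powidf2 (x, n), with the exponent converted to
   int mode first if necessary.  */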
2845 /* Expand expression EXP which is a call to the strlen builtin. Return
2846 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2847 try to get the result in TARGET, if convenient. */
2849 static rtx
2850 expand_builtin_strlen (tree exp, rtx target,
2851 machine_mode target_mode)
2853 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2854 return NULL_RTX;
2856 struct expand_operand ops[4];
2857 rtx pat;
2858 tree len;
2859 tree src = CALL_EXPR_ARG (exp, 0);
2860 rtx src_reg;
2861 rtx_insn *before_strlen;
2862 machine_mode insn_mode;
2863 enum insn_code icode = CODE_FOR_nothing;
2864 unsigned int align;
2866 /* If the length can be computed at compile-time, return it. */
2867 len = c_strlen (src, 0);
2868 if (len)
2869 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2871 /* If the length can be computed at compile-time and is a constant
2872 integer, but there are side-effects in src, evaluate
2873 src for side-effects, then return len.
2874 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2875 can be optimized into: i++; x = 3; */
2876 len = c_strlen (src, 1);
2877 if (len && TREE_CODE (len) == INTEGER_CST)
2879 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2880 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2883 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2885 /* If SRC is not a pointer type, don't do this operation inline. */
2886 if (align == 0)
2887 return NULL_RTX;
2889 /* Bail out if we can't compute strlen in the right mode. */
2890 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2892 icode = optab_handler (strlen_optab, insn_mode);
2893 if (icode != CODE_FOR_nothing)
2894 break;
2896 if (insn_mode == VOIDmode)
2897 return NULL_RTX;
2899 /* Make a place to hold the source address. We will not expand
2900 the actual source until we are sure that the expansion will
2901 not fail -- there are trees that cannot be expanded twice. */
2902 src_reg = gen_reg_rtx (Pmode);
2904 /* Mark the beginning of the strlen sequence so we can emit the
2905 source operand later. */
2906 before_strlen = get_last_insn ();
2908 create_output_operand (&ops[0], target, insn_mode);
2909 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2910 create_integer_operand (&ops[2], 0);
2911 create_integer_operand (&ops[3], align);
2912 if (!maybe_expand_insn (icode, 4, ops))
2913 return NULL_RTX;
2915 /* Check to see if the argument was declared attribute nonstring
2916 and if so, issue a warning since at this point it's not known
2917 to be nul-terminated. */
2918 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2920 /* Now that we are assured of success, expand the source. */
2921 start_sequence ();
2922 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2923 if (pat != src_reg)
2925 #ifdef POINTERS_EXTEND_UNSIGNED
2926 if (GET_MODE (pat) != Pmode)
2927 pat = convert_to_mode (Pmode, pat,
2928 POINTERS_EXTEND_UNSIGNED);
2929 #endif
2930 emit_move_insn (src_reg, pat);
2932 pat = get_insns ();
2933 end_sequence ();
2935 if (before_strlen)
2936 emit_insn_after (pat, before_strlen);
2937 else
2938 emit_insn_before (pat, get_insns ());
2940 /* Return the value in the proper mode for this function. */
2941 if (GET_MODE (ops[0].value) == target_mode)
2942 target = ops[0].value;
2943 else if (target != 0)
2944 convert_move (target, ops[0].value, 0);
2945 else
2946 target = convert_to_mode (target_mode, ops[0].value, 0);
2948 return target;
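/* Editorial example (illustrative): thanks to the constant-folding
   paths above,

     size_t n = strlen ("hello");

   expands to the constant 5 with no call, and the side-effect case
   quoted in the comment above still evaluates its argument exactly
   once.  */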
2951 /* Expand call EXP to the strnlen built-in, returning the result
2952 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2954 static rtx
2955 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2957 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2960 tree src = CALL_EXPR_ARG (exp, 0);
2961 tree bound = CALL_EXPR_ARG (exp, 1);
2963 if (!bound)
2964 return NULL_RTX;
2966 location_t loc = UNKNOWN_LOCATION;
2967 if (EXPR_HAS_LOCATION (exp))
2968 loc = EXPR_LOCATION (exp);
2970 tree maxobjsize = max_object_size ();
2971 tree func = get_callee_fndecl (exp);
2973 tree len = c_strlen (src, 0);
2975 if (TREE_CODE (bound) == INTEGER_CST)
2977 if (!TREE_NO_WARNING (exp)
2978 && tree_int_cst_lt (maxobjsize, bound)
2979 && warning_at (loc, OPT_Wstringop_overflow_,
2980 "%K%qD specified bound %E "
2981 "exceeds maximum object size %E",
2982 exp, func, bound, maxobjsize))
2983 TREE_NO_WARNING (exp) = true;
2985 if (!len || TREE_CODE (len) != INTEGER_CST)
2986 return NULL_RTX;
2988 len = fold_convert_loc (loc, size_type_node, len);
2989 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2990 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2993 if (TREE_CODE (bound) != SSA_NAME)
2994 return NULL_RTX;
2996 wide_int min, max;
2997 enum value_range_type rng = get_range_info (bound, &min, &max);
2998 if (rng != VR_RANGE)
2999 return NULL_RTX;
3001 if (!TREE_NO_WARNING (exp)
3002 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3003 && warning_at (loc, OPT_Wstringop_overflow_,
3004 "%K%qD specified bound [%wu, %wu] "
3005 "exceeds maximum object size %E",
3006 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3007 TREE_NO_WARNING (exp) = true;
3009 if (!len || TREE_CODE (len) != INTEGER_CST)
3010 return NULL_RTX;
3012 if (wi::gtu_p (min, wi::to_wide (len)))
3013 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
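/* Editorial example (illustrative): with a known string and a
   constant bound the function above folds the call, e.g.

     size_t n = strnlen ("abc", 6);

   becomes MIN (3, 6) == 3 at compile time, while a bound known to
   exceed the maximum object size triggers -Wstringop-overflow.  */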
3019 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3020 bytes from constant string DATA + OFFSET and return it as target
3021 constant. */
3023 static rtx
3024 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3025 scalar_int_mode mode)
3027 const char *str = (const char *) data;
3029 gcc_assert (offset >= 0
3030 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3031 <= strlen (str) + 1));
3033 return c_readstr (str + offset, mode);
3036 /* LEN specifies the length of the block for the memcpy/memset operation.
3037 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3038 In some cases we can make a very likely guess at the maximum size,
3039 which we then record in PROBABLE_MAX_SIZE. */
3041 static void
3042 determine_block_size (tree len, rtx len_rtx,
3043 unsigned HOST_WIDE_INT *min_size,
3044 unsigned HOST_WIDE_INT *max_size,
3045 unsigned HOST_WIDE_INT *probable_max_size)
3047 if (CONST_INT_P (len_rtx))
3049 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3050 return;
3052 else
3054 wide_int min, max;
3055 enum value_range_type range_type = VR_UNDEFINED;
3057 /* Determine bounds from the type. */
3058 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3059 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3060 else
3061 *min_size = 0;
3062 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3063 *probable_max_size = *max_size
3064 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3065 else
3066 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3068 if (TREE_CODE (len) == SSA_NAME)
3069 range_type = get_range_info (len, &min, &max);
3070 if (range_type == VR_RANGE)
3072 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3073 *min_size = min.to_uhwi ();
3074 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3075 *probable_max_size = *max_size = max.to_uhwi ();
3077 else if (range_type == VR_ANTI_RANGE)
3079 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3080 if (min == 0)
3082 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3083 *min_size = max.to_uhwi () + 1;
3085 /* Code like
3087 int n;
3088 if (n < 100)
3089 memcpy (a, b, n)
3091 produces an anti-range allowing negative values of N. We can still
3092 use that information and guess that N is not negative. */
3094 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3095 *probable_max_size = min.to_uhwi () - 1;
3098 gcc_checking_assert (*max_size <=
3099 (unsigned HOST_WIDE_INT)
3100 GET_MODE_MASK (GET_MODE (len_rtx)));
3103 /* Try to verify that the sizes and lengths of the arguments to a string
3104 manipulation function given by EXP are within valid bounds and that
3105 the operation does not lead to buffer overflow or read past the end.
3106 Arguments other than EXP may be null. When non-null, the arguments
3107 have the following meaning:
3108 DST is the destination of a copy call or NULL otherwise.
3109 SRC is the source of a copy call or NULL otherwise.
3110 DSTWRITE is the number of bytes written into the destination obtained
3111 from the user-supplied size argument to the function (such as in
3112 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3113 MAXREAD is the user-supplied bound on the length of the source sequence
3114 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3115 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3116 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3117 expression EXP is a string function call (as opposed to a memory call
3118 like memcpy). As an exception, SRCSTR can also be an integer denoting
3119 the precomputed size of the source string or object (for functions like
3120 memcpy).
3121 DSTSIZE is the size of the destination object specified by the last
3122 argument to the _chk builtins, typically resulting from the expansion
3123 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3124 DSTSIZE)).
3126 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3127 SIZE_MAX.
3129 If the call is successfully verified as safe return true, otherwise
3130 return false. */
3132 static bool
3133 check_access (tree exp, tree, tree, tree dstwrite,
3134 tree maxread, tree srcstr, tree dstsize)
3136 int opt = OPT_Wstringop_overflow_;
3138 /* The size of the largest object is half the address space, or
3139 PTRDIFF_MAX. (This is way too permissive.) */
3140 tree maxobjsize = max_object_size ();
3142 /* Either the length of the source string for string functions or
3143 the size of the source object for raw memory functions. */
3144 tree slen = NULL_TREE;
3146 tree range[2] = { NULL_TREE, NULL_TREE };
3148 /* Set to true when the exact number of bytes written by a string
3149 function like strcpy is not known and the only thing that is
3150 known is that it must be at least one (for the terminating nul). */
3151 bool at_least_one = false;
3152 if (srcstr)
3154 /* SRCSTR is normally a pointer to string but as a special case
3155 it can be an integer denoting the length of a string. */
3156 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3158 /* Try to determine the range of lengths the source string
3159 refers to. If it can be determined and is less than
3160 the upper bound given by MAXREAD add one to it for
3161 the terminating nul. Otherwise, set it to one for
3162 the same reason, or to MAXREAD as appropriate. */
3163 get_range_strlen (srcstr, range);
3164 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3166 if (maxread && tree_int_cst_le (maxread, range[0]))
3167 range[0] = range[1] = maxread;
3168 else
3169 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3170 range[0], size_one_node);
3172 if (maxread && tree_int_cst_le (maxread, range[1]))
3173 range[1] = maxread;
3174 else if (!integer_all_onesp (range[1]))
3175 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3176 range[1], size_one_node);
3178 slen = range[0];
3180 else
3182 at_least_one = true;
3183 slen = size_one_node;
3186 else
3187 slen = srcstr;
3190 if (!dstwrite && !maxread)
3192 /* When the only available piece of data is the object size
3193 there is nothing to do. */
3194 if (!slen)
3195 return true;
3197 /* Otherwise, when the length of the source sequence is known
3198 (as with strlen), set DSTWRITE to it. */
3199 if (!range[0])
3200 dstwrite = slen;
3203 if (!dstsize)
3204 dstsize = maxobjsize;
3206 if (dstwrite)
3207 get_size_range (dstwrite, range);
3209 tree func = get_callee_fndecl (exp);
3211 /* First check the number of bytes to be written against the maximum
3212 object size. */
3213 if (range[0]
3214 && TREE_CODE (range[0]) == INTEGER_CST
3215 && tree_int_cst_lt (maxobjsize, range[0]))
3217 if (TREE_NO_WARNING (exp))
3218 return false;
3220 location_t loc = tree_nonartificial_location (exp);
3221 loc = expansion_point_location_if_in_system_header (loc);
3223 bool warned;
3224 if (range[0] == range[1])
3225 warned = warning_at (loc, opt,
3226 "%K%qD specified size %E "
3227 "exceeds maximum object size %E",
3228 exp, func, range[0], maxobjsize);
3229 else
3230 warned = warning_at (loc, opt,
3231 "%K%qD specified size between %E and %E "
3232 "exceeds maximum object size %E",
3233 exp, func,
3234 range[0], range[1], maxobjsize);
3235 if (warned)
3236 TREE_NO_WARNING (exp) = true;
3238 return false;
3241 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3242 constant, and in range of unsigned HOST_WIDE_INT. */
3243 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3245 /* Next check the number of bytes to be written against the destination
3246 object size. */
3247 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3249 if (range[0]
3250 && TREE_CODE (range[0]) == INTEGER_CST
3251 && ((tree_fits_uhwi_p (dstsize)
3252 && tree_int_cst_lt (dstsize, range[0]))
3253 || (dstwrite
3254 && tree_fits_uhwi_p (dstwrite)
3255 && tree_int_cst_lt (dstwrite, range[0]))))
3257 if (TREE_NO_WARNING (exp))
3258 return false;
3260 location_t loc = tree_nonartificial_location (exp);
3261 loc = expansion_point_location_if_in_system_header (loc);
3263 if (dstwrite == slen && at_least_one)
3265 /* This is a call to strcpy with a destination of 0 size
3266 and a source of unknown length. The call will write
3267 at least one byte past the end of the destination. */
3268 warning_at (loc, opt,
3269 "%K%qD writing %E or more bytes into a region "
3270 "of size %E overflows the destination",
3271 exp, func, range[0], dstsize);
3273 else if (tree_int_cst_equal (range[0], range[1]))
3274 warning_n (loc, opt, tree_to_uhwi (range[0]),
3275 "%K%qD writing %E byte into a region "
3276 "of size %E overflows the destination",
3277 "%K%qD writing %E bytes into a region "
3278 "of size %E overflows the destination",
3279 exp, func, range[0], dstsize);
3280 else if (tree_int_cst_sign_bit (range[1]))
3282 /* Avoid printing the upper bound if it's invalid. */
3283 warning_at (loc, opt,
3284 "%K%qD writing %E or more bytes into a region "
3285 "of size %E overflows the destination",
3286 exp, func, range[0], dstsize);
3288 else
3289 warning_at (loc, opt,
3290 "%K%qD writing between %E and %E bytes into "
3291 "a region of size %E overflows the destination",
3292 exp, func, range[0], range[1],
3293 dstsize);
3295 /* Return error when an overflow has been detected. */
3296 return false;
3300 /* Check the maximum length of the source sequence against the size
3301 of the destination object if known, or against the maximum size
3302 of an object. */
3303 if (maxread)
3305 get_size_range (maxread, range);
3307 /* Use the lower end for MAXREAD from now on. */
3308 if (range[0])
3309 maxread = range[0];
3311 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3313 location_t loc = tree_nonartificial_location (exp);
3314 loc = expansion_point_location_if_in_system_header (loc);
3316 if (tree_int_cst_lt (maxobjsize, range[0]))
3318 if (TREE_NO_WARNING (exp))
3319 return false;
3321 /* Warn about crazy big sizes first since that's more
3322 likely to be meaningful than saying that the bound
3323 is greater than the object size if both are big. */
3324 if (range[0] == range[1])
3325 warning_at (loc, opt,
3326 "%K%qD specified bound %E "
3327 "exceeds maximum object size %E",
3328 exp, func,
3329 range[0], maxobjsize);
3330 else
3331 warning_at (loc, opt,
3332 "%K%qD specified bound between %E and %E "
3333 "exceeds maximum object size %E",
3334 exp, func,
3335 range[0], range[1], maxobjsize);
3337 return false;
3340 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3342 if (TREE_NO_WARNING (exp))
3343 return false;
3345 if (tree_int_cst_equal (range[0], range[1]))
3346 warning_at (loc, opt,
3347 "%K%qD specified bound %E "
3348 "exceeds destination size %E",
3349 exp, func,
3350 range[0], dstsize);
3351 else
3352 warning_at (loc, opt,
3353 "%K%qD specified bound between %E and %E "
3354 "exceeds destination size %E",
3355 exp, func,
3356 range[0], range[1], dstsize);
3357 return false;
3362 /* Check for reading past the end of SRC. */
3363 if (slen
3364 && slen == srcstr
3365 && dstwrite && range[0]
3366 && tree_int_cst_lt (slen, range[0]))
3368 if (TREE_NO_WARNING (exp))
3369 return false;
3371 location_t loc = tree_nonartificial_location (exp);
3373 if (tree_int_cst_equal (range[0], range[1]))
3374 warning_n (loc, opt, tree_to_uhwi (range[0]),
3375 "%K%qD reading %E byte from a region of size %E",
3376 "%K%qD reading %E bytes from a region of size %E",
3377 exp, func, range[0], slen);
3378 else if (tree_int_cst_sign_bit (range[1]))
3380 /* Avoid printing the upper bound if it's invalid. */
3381 warning_at (loc, opt,
3382 "%K%qD reading %E or more bytes from a region "
3383 "of size %E",
3384 exp, func, range[0], slen);
3386 else
3387 warning_at (loc, opt,
3388 "%K%qD reading between %E and %E bytes from a region "
3389 "of size %E",
3390 exp, func, range[0], range[1], slen);
3391 return false;
3394 return true;
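/* Editorial example (illustrative): a call check_access rejects,
   assuming -Wstringop-overflow is enabled:

     char d[3];
     strcpy (d, "abcd");   (writing 5 bytes into a region of size 3)

   check_access returns false and, for most of the diagnostics above,
   sets TREE_NO_WARNING so the same call is not diagnosed twice.  */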
3397 /* Helper to compute the size of the object referenced by the DEST
3398 expression which must have pointer type, using Object Size type
3399 OSTYPE (only the least significant 2 bits are used). Return
3400 an estimate of the size of the object if successful or NULL when
3401 the size cannot be determined. When the referenced object involves
3402 a non-constant offset in some range the returned value represents
3403 the largest size given the smallest non-negative offset in the
3404 range. The function is intended for diagnostics and should not
3405 be used to influence code generation or optimization. */
3407 tree
3408 compute_objsize (tree dest, int ostype)
3410 unsigned HOST_WIDE_INT size;
3412 /* Only the two least significant bits are meaningful. */
3413 ostype &= 3;
3415 if (compute_builtin_object_size (dest, ostype, &size))
3416 return build_int_cst (sizetype, size);
3418 if (TREE_CODE (dest) == SSA_NAME)
3420 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3421 if (!is_gimple_assign (stmt))
3422 return NULL_TREE;
3424 dest = gimple_assign_rhs1 (stmt);
3426 tree_code code = gimple_assign_rhs_code (stmt);
3427 if (code == POINTER_PLUS_EXPR)
3429 /* compute_builtin_object_size fails for addresses with
3430 non-constant offsets. Try to determine the range of
3431 such an offset here and use it to adjust the constant
3432 size. */
3433 tree off = gimple_assign_rhs2 (stmt);
3434 if (TREE_CODE (off) == INTEGER_CST)
3436 if (tree size = compute_objsize (dest, ostype))
3438 wide_int wioff = wi::to_wide (off);
3439 wide_int wisiz = wi::to_wide (size);
3441 /* Ignore negative offsets for now. For others,
3442 use the lower bound as the most optimistic
3443 estimate of the (remaining) size. */
3444 if (wi::sign_mask (wioff))
3446 else if (wi::ltu_p (wioff, wisiz))
3447 return wide_int_to_tree (TREE_TYPE (size),
3448 wi::sub (wisiz, wioff));
3449 else
3450 return size_zero_node;
3453 else if (TREE_CODE (off) == SSA_NAME
3454 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3456 wide_int min, max;
3457 enum value_range_type rng = get_range_info (off, &min, &max);
3459 if (rng == VR_RANGE)
3461 if (tree size = compute_objsize (dest, ostype))
3463 wide_int wisiz = wi::to_wide (size);
3465 /* Ignore negative offsets for now. For others,
3466 use the lower bound as the most optimistic
3467 estimate of the (remaining) size. */
3468 if (wi::sign_mask (min))
3470 else if (wi::ltu_p (min, wisiz))
3471 return wide_int_to_tree (TREE_TYPE (size),
3472 wi::sub (wisiz, min));
3473 else
3474 return size_zero_node;
3479 else if (code != ADDR_EXPR)
3480 return NULL_TREE;
3483 /* Unless computing the largest size (for memcpy and other raw memory
3484 functions), try to determine the size of the object from its type. */
3485 if (!ostype)
3486 return NULL_TREE;
3488 if (TREE_CODE (dest) != ADDR_EXPR)
3489 return NULL_TREE;
3491 tree type = TREE_TYPE (dest);
3492 if (TREE_CODE (type) == POINTER_TYPE)
3493 type = TREE_TYPE (type);
3495 type = TYPE_MAIN_VARIANT (type);
3497 if (TREE_CODE (type) == ARRAY_TYPE
3498 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3500 /* Return the constant size unless it's zero (that's a zero-length
3501 array likely at the end of a struct). */
3502 tree size = TYPE_SIZE_UNIT (type);
3503 if (size && TREE_CODE (size) == INTEGER_CST
3504 && !integer_zerop (size))
3505 return size;
3508 return NULL_TREE;
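/* Illustrative sketch (not part of GCC): for a constant offset into a
   known object, the POINTER_PLUS_EXPR handling above yields the
   remaining size:

     char a[8];
     char *p = a + 3;    compute_objsize (p, 0) == 5 bytes
     char *q = a + 8;    offset equals the size, so size_zero_node

   Negative offsets are ignored for now, as the comments above note.  */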
3511 /* Helper to determine and check the sizes of the source and the destination
3512 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3513 call expression, DEST is the destination argument, SRC is the source
3514 argument or null, and SIZE is the number of bytes to write. Use Object
3515 Size type-0 regardless of the OPT_Wstringop_overflow_ setting. Return
3516 true on success (no overflow or invalid sizes), false otherwise. */
3518 static bool
3519 check_memop_access (tree exp, tree dest, tree src, tree size)
3521 /* For functions like memset and memcpy that operate on raw memory
3522 try to determine the size of the largest source and destination
3523 object using type-0 Object Size regardless of the object size
3524 type specified by the option. */
3525 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3526 tree dstsize = compute_objsize (dest, 0);
3528 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3529 srcsize, dstsize);
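/* Example of the diagnostic this helper enables (illustrative only,
   not GCC code):

     char d[4];
     void f (const char *s) { __builtin_memcpy (d, s, 8); }

   The constant size 8 exceeds the type-0 object size of D (4 bytes),
   so with -Wstringop-overflow the call is diagnosed and
   check_memop_access returns false.  */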
3532 /* Validate memchr arguments without performing any expansion.
3533 Return NULL_RTX. */
3535 static rtx
3536 expand_builtin_memchr (tree exp, rtx)
3538 if (!validate_arglist (exp,
3539 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3540 return NULL_RTX;
3542 tree arg1 = CALL_EXPR_ARG (exp, 0);
3543 tree len = CALL_EXPR_ARG (exp, 2);
3545 /* Diagnose calls where the specified length exceeds the size
3546 of the object. */
3547 if (warn_stringop_overflow)
3549 tree size = compute_objsize (arg1, 0);
3550 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3551 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3554 return NULL_RTX;
3557 /* Expand a call EXP to the memcpy builtin.
3558 Return NULL_RTX if we failed, the caller should emit a normal call,
3559 otherwise try to get the result in TARGET, if convenient (and in
3560 mode MODE if that's convenient). */
3562 static rtx
3563 expand_builtin_memcpy (tree exp, rtx target)
3565 if (!validate_arglist (exp,
3566 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3567 return NULL_RTX;
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 1);
3571 tree len = CALL_EXPR_ARG (exp, 2);
3573 check_memop_access (exp, dest, src, len);
3575 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3576 /*endp=*/ 0);
3579 /* Check a call EXP to the memmove built-in for validity.
3580 Return NULL_RTX on both success and failure. */
3582 static rtx
3583 expand_builtin_memmove (tree exp, rtx)
3585 if (!validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3587 return NULL_RTX;
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 tree len = CALL_EXPR_ARG (exp, 2);
3593 check_memop_access (exp, dest, src, len);
3595 return NULL_RTX;
3598 /* Expand a call EXP to the mempcpy builtin.
3599 Return NULL_RTX if we failed; the caller should emit a normal call,
3600 otherwise try to get the result in TARGET, if convenient (and in
3601 mode MODE if that's convenient). If ENDP is 0 return the
3602 destination pointer, if ENDP is 1 return the end pointer ala
3603 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3604 stpcpy. */
3606 static rtx
3607 expand_builtin_mempcpy (tree exp, rtx target)
3609 if (!validate_arglist (exp,
3610 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3611 return NULL_RTX;
3613 tree dest = CALL_EXPR_ARG (exp, 0);
3614 tree src = CALL_EXPR_ARG (exp, 1);
3615 tree len = CALL_EXPR_ARG (exp, 2);
3617 /* Policy does not generally allow using compute_objsize (which
3618 is used internally by check_memop_access) to change code generation
3619 or drive optimization decisions.
3621 In this instance it is safe because the code we generate has
3622 the same semantics regardless of the return value of
3623 check_memop_access. Exactly the same amount of data is copied
3624 and the return value is exactly the same in both cases.
3626 Furthermore, check_memop_access always uses mode 0 for the call to
3627 compute_objsize, so the imprecise nature of compute_objsize is
3628 avoided. */
3630 /* Avoid expanding mempcpy into memcpy when the call is determined
3631 to overflow the buffer. This also prevents the same overflow
3632 from being diagnosed again when expanding memcpy. */
3633 if (!check_memop_access (exp, dest, src, len))
3634 return NULL_RTX;
3636 return expand_builtin_mempcpy_args (dest, src, len,
3637 target, exp, /*endp=*/ 1);
3640 /* Helper function to do the actual work for expanding the memory copy
3641 family of functions (memcpy, mempcpy, stpcpy). Expansion should assign
3642 LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
3643 If ENDP is 0 return the
3644 destination pointer, if ENDP is 1 return the end pointer ala
3645 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3646 stpcpy. */
3648 static rtx
3649 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3650 rtx target, tree exp, int endp)
3652 const char *src_str;
3653 unsigned int src_align = get_pointer_alignment (src);
3654 unsigned int dest_align = get_pointer_alignment (dest);
3655 rtx dest_mem, src_mem, dest_addr, len_rtx;
3656 HOST_WIDE_INT expected_size = -1;
3657 unsigned int expected_align = 0;
3658 unsigned HOST_WIDE_INT min_size;
3659 unsigned HOST_WIDE_INT max_size;
3660 unsigned HOST_WIDE_INT probable_max_size;
3662 /* If DEST is not a pointer type, call the normal function. */
3663 if (dest_align == 0)
3664 return NULL_RTX;
3666 /* If SRC is not a pointer type, don't do this
3667 operation in-line. */
3668 if (src_align == 0)
3669 return NULL_RTX;
3671 if (currently_expanding_gimple_stmt)
3672 stringop_block_profile (currently_expanding_gimple_stmt,
3673 &expected_align, &expected_size);
3675 if (expected_align < dest_align)
3676 expected_align = dest_align;
3677 dest_mem = get_memory_rtx (dest, len);
3678 set_mem_align (dest_mem, dest_align);
3679 len_rtx = expand_normal (len);
3680 determine_block_size (len, len_rtx, &min_size, &max_size,
3681 &probable_max_size);
3682 src_str = c_getstr (src);
3684 /* If SRC is a string constant and block move would be done
3685 by pieces, we can avoid loading the string from memory
3686 and only store the computed constants. */
3687 if (src_str
3688 && CONST_INT_P (len_rtx)
3689 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3690 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3691 CONST_CAST (char *, src_str),
3692 dest_align, false))
3694 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3695 builtin_memcpy_read_str,
3696 CONST_CAST (char *, src_str),
3697 dest_align, false, endp);
3698 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3699 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3700 return dest_mem;
3703 src_mem = get_memory_rtx (src, len);
3704 set_mem_align (src_mem, src_align);
3706 /* Copy word part most expediently. */
3707 enum block_op_methods method = BLOCK_OP_NORMAL;
3708 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3709 method = BLOCK_OP_TAILCALL;
3710 if (endp == 1 && target != const0_rtx)
3711 method = BLOCK_OP_NO_LIBCALL_RET;
3712 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3713 expected_align, expected_size,
3714 min_size, max_size, probable_max_size);
3715 if (dest_addr == pc_rtx)
3716 return NULL_RTX;
3718 if (dest_addr == 0)
3720 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3721 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3724 if (endp && target != const0_rtx)
3726 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3727 /* stpcpy returns a pointer to the last byte. */
3728 if (endp == 2)
3729 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3732 return dest_addr;
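/* Illustration of the ENDP convention implemented above (a sketch,
   not GCC code), with DEST = d, SRC = s and LEN = n:

     memcpy (d, s, n)    returns d          (ENDP == 0)
     mempcpy (d, s, n)   returns d + n      (ENDP == 1)
     stpcpy (d, s)       returns d + n - 1  (ENDP == 2, N counts the NUL)

   matching the gen_rtx_PLUS and gen_rtx_MINUS adjustments of
   DEST_ADDR at the end of the function.  */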
3735 static rtx
3736 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3737 rtx target, tree orig_exp, int endp)
3739 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3740 endp);
3743 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3744 we failed, the caller should emit a normal call, otherwise try to
3745 get the result in TARGET, if convenient. If ENDP is 0 return the
3746 destination pointer, if ENDP is 1 return the end pointer ala
3747 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3748 stpcpy. */
3750 static rtx
3751 expand_movstr (tree dest, tree src, rtx target, int endp)
3753 struct expand_operand ops[3];
3754 rtx dest_mem;
3755 rtx src_mem;
3757 if (!targetm.have_movstr ())
3758 return NULL_RTX;
3760 dest_mem = get_memory_rtx (dest, NULL);
3761 src_mem = get_memory_rtx (src, NULL);
3762 if (!endp)
3764 target = force_reg (Pmode, XEXP (dest_mem, 0));
3765 dest_mem = replace_equiv_address (dest_mem, target);
3768 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3769 create_fixed_operand (&ops[1], dest_mem);
3770 create_fixed_operand (&ops[2], src_mem);
3771 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3772 return NULL_RTX;
3774 if (endp && target != const0_rtx)
3776 target = ops[0].value;
3777 /* movstr is supposed to set end to the address of the NUL
3778 terminator. If the caller requested a mempcpy-like return value,
3779 adjust it. */
3780 if (endp == 1)
3782 rtx tem = plus_constant (GET_MODE (target),
3783 gen_lowpart (GET_MODE (target), target), 1);
3784 emit_move_insn (target, force_operand (tem, NULL_RTX));
3787 return target;
3790 /* Do some very basic size validation of a call to the strcat builtin
3791 given by EXP. Return NULL_RTX to have the built-in expand to a call
3792 to the library function. */
3794 static rtx
3795 expand_builtin_strcat (tree exp, rtx)
3797 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3798 || !warn_stringop_overflow)
3799 return NULL_RTX;
3801 tree dest = CALL_EXPR_ARG (exp, 0);
3802 tree src = CALL_EXPR_ARG (exp, 1);
3804 /* There is no way here to determine the length of the string in
3805 the destination to which the SRC string is being appended, so
3806 just diagnose cases when the source string is longer than
3807 the destination object. */
3809 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3811 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3812 destsize);
3814 return NULL_RTX;
3817 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3818 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3819 try to get the result in TARGET, if convenient (and in mode MODE if that's
3820 convenient). */
3822 static rtx
3823 expand_builtin_strcpy (tree exp, rtx target)
3825 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3828 tree dest = CALL_EXPR_ARG (exp, 0);
3829 tree src = CALL_EXPR_ARG (exp, 1);
3831 if (warn_stringop_overflow)
3833 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3834 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3835 src, destsize);
3838 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3840 /* Check to see if the argument was declared attribute nonstring
3841 and if so, issue a warning since at this point it's not known
3842 to be nul-terminated. */
3843 tree fndecl = get_callee_fndecl (exp);
3844 maybe_warn_nonstring_arg (fndecl, exp);
3845 return ret;
3848 return NULL_RTX;
3851 /* Helper function to do the actual work for expand_builtin_strcpy. The
3852 arguments to the builtin_strcpy call DEST and SRC are broken out
3853 so that this can also be called without constructing an actual CALL_EXPR.
3854 The other arguments and return value are the same as for
3855 expand_builtin_strcpy. */
3857 static rtx
3858 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3860 return expand_movstr (dest, src, target, /*endp=*/0);
3863 /* Expand a call EXP to the stpcpy builtin.
3864 Return NULL_RTX if we failed; the caller should emit a normal call,
3865 otherwise try to get the result in TARGET, if convenient (and in
3866 mode MODE if that's convenient). */
3868 static rtx
3869 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3871 tree dst, src;
3872 location_t loc = EXPR_LOCATION (exp);
3874 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3875 return NULL_RTX;
3877 dst = CALL_EXPR_ARG (exp, 0);
3878 src = CALL_EXPR_ARG (exp, 1);
3880 if (warn_stringop_overflow)
3882 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3883 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3884 src, destsize);
3887 /* If return value is ignored, transform stpcpy into strcpy. */
3888 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3890 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3891 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3892 return expand_expr (result, target, mode, EXPAND_NORMAL);
3894 else
3896 tree len, lenp1;
3897 rtx ret;
3899 /* Ensure we get an actual string whose length can be evaluated at
3900 compile-time, not an expression containing a string. This is
3901 because the latter will potentially produce pessimized code
3902 when used to produce the return value. */
3903 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3904 return expand_movstr (dst, src, target, /*endp=*/2);
3906 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3907 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3908 target, exp, /*endp=*/2);
3910 if (ret)
3911 return ret;
3913 if (TREE_CODE (len) == INTEGER_CST)
3915 rtx len_rtx = expand_normal (len);
3917 if (CONST_INT_P (len_rtx))
3919 ret = expand_builtin_strcpy_args (dst, src, target);
3921 if (ret)
3923 if (! target)
3925 if (mode != VOIDmode)
3926 target = gen_reg_rtx (mode);
3927 else
3928 target = gen_reg_rtx (GET_MODE (ret));
3930 if (GET_MODE (target) != GET_MODE (ret))
3931 ret = gen_lowpart (GET_MODE (target), ret);
3933 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3934 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3935 gcc_assert (ret);
3937 return target;
3942 return expand_movstr (dst, src, target, /*endp=*/2);
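/* Usage note (illustrative): when the result of stpcpy is unused,

     void f (char *d, const char *s) { __builtin_stpcpy (d, s); }

   the expansion above emits strcpy (d, s) instead, since the two
   functions differ only in their return value.  */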
3946 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3947 arguments while being careful to avoid duplicate warnings (which could
3948 be issued if the expander were to expand the call, resulting in it
3949 being emitted in expand_call ()). */
3951 static rtx
3952 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3954 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3956 /* The call has been successfully expanded. Check for nonstring
3957 arguments and issue warnings as appropriate. */
3958 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3959 return ret;
3962 return NULL_RTX;
3965 /* Check a call EXP to the stpncpy built-in for validity.
3966 Return NULL_RTX on both success and failure. */
3968 static rtx
3969 expand_builtin_stpncpy (tree exp, rtx)
3971 if (!validate_arglist (exp,
3972 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3973 || !warn_stringop_overflow)
3974 return NULL_RTX;
3976 /* The source and destination of the call. */
3977 tree dest = CALL_EXPR_ARG (exp, 0);
3978 tree src = CALL_EXPR_ARG (exp, 1);
3980 /* The exact number of bytes to write (not the maximum). */
3981 tree len = CALL_EXPR_ARG (exp, 2);
3983 /* The size of the destination object. */
3984 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3986 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3988 return NULL_RTX;
3991 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3992 bytes from constant string DATA + OFFSET and return it as target
3993 constant. */
3995 static rtx
3996 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3997 scalar_int_mode mode)
3999 const char *str = (const char *) data;
4001 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4002 return const0_rtx;
4004 return c_readstr (str + offset, mode);
4007 /* Helper to check the sizes of sequences and the destination of calls
4008 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4009 success (no overflow or invalid sizes), false otherwise. */
4011 static bool
4012 check_strncat_sizes (tree exp, tree objsize)
4014 tree dest = CALL_EXPR_ARG (exp, 0);
4015 tree src = CALL_EXPR_ARG (exp, 1);
4016 tree maxread = CALL_EXPR_ARG (exp, 2);
4018 /* Try to determine the range of lengths that the source expression
4019 refers to. */
4020 tree lenrange[2];
4021 get_range_strlen (src, lenrange);
4023 /* Try to verify that the destination is big enough for the shortest
4024 string. */
4026 if (!objsize && warn_stringop_overflow)
4028 /* If it hasn't been provided by __strncat_chk, try to determine
4029 the size of the destination object into which the source is
4030 being copied. */
4031 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4034 /* Add one for the terminating nul. */
4035 tree srclen = (lenrange[0]
4036 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4037 size_one_node)
4038 : NULL_TREE);
4040 /* The strncat function copies at most MAXREAD bytes and always appends
4041 the terminating nul so the specified upper bound should never be equal
4042 to (or greater than) the size of the destination. */
4043 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4044 && tree_int_cst_equal (objsize, maxread))
4046 location_t loc = tree_nonartificial_location (exp);
4047 loc = expansion_point_location_if_in_system_header (loc);
4049 warning_at (loc, OPT_Wstringop_overflow_,
4050 "%K%qD specified bound %E equals destination size",
4051 exp, get_callee_fndecl (exp), maxread);
4053 return false;
4056 if (!srclen
4057 || (maxread && tree_fits_uhwi_p (maxread)
4058 && tree_fits_uhwi_p (srclen)
4059 && tree_int_cst_lt (maxread, srclen)))
4060 srclen = maxread;
4062 /* The number of bytes to write is LEN but check_access will also
4063 check SRCLEN if LEN's value isn't known. */
4064 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4065 objsize);
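/* Example of the bound check above (illustrative, not GCC code):

     char d[8];
     void f (const char *s) { __builtin_strncat (d, s, sizeof d); }

   Passing sizeof d as the bound is diagnosed because strncat appends
   up to that many characters plus the terminating nul; a correct
   bound leaves room for the existing string and the nul, e.g.
   sizeof d - strlen (d) - 1.  */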
4068 /* Similar to expand_builtin_strcat, do some very basic size validation
4069 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4070 the built-in expand to a call to the library function. */
4072 static rtx
4073 expand_builtin_strncat (tree exp, rtx)
4075 if (!validate_arglist (exp,
4076 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4077 || !warn_stringop_overflow)
4078 return NULL_RTX;
4080 tree dest = CALL_EXPR_ARG (exp, 0);
4081 tree src = CALL_EXPR_ARG (exp, 1);
4082 /* The upper bound on the number of bytes to write. */
4083 tree maxread = CALL_EXPR_ARG (exp, 2);
4084 /* The length of the source sequence. */
4085 tree slen = c_strlen (src, 1);
4087 /* Try to determine the range of lengths that the source expression
4088 refers to. */
4089 tree lenrange[2];
4090 if (slen)
4091 lenrange[0] = lenrange[1] = slen;
4092 else
4093 get_range_strlen (src, lenrange);
4095 /* Try to verify that the destination is big enough for the shortest
4096 string. First try to determine the size of the destination object
4097 into which the source is being copied. */
4098 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4100 /* Add one for the terminating nul. */
4101 tree srclen = (lenrange[0]
4102 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4103 size_one_node)
4104 : NULL_TREE);
4106 /* The strncat function copies at most MAXREAD bytes and always appends
4107 the terminating nul so the specified upper bound should never be equal
4108 to (or greater than) the size of the destination. */
4109 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4110 && tree_int_cst_equal (destsize, maxread))
4112 location_t loc = tree_nonartificial_location (exp);
4113 loc = expansion_point_location_if_in_system_header (loc);
4115 warning_at (loc, OPT_Wstringop_overflow_,
4116 "%K%qD specified bound %E equals destination size",
4117 exp, get_callee_fndecl (exp), maxread);
4119 return NULL_RTX;
4122 if (!srclen
4123 || (maxread && tree_fits_uhwi_p (maxread)
4124 && tree_fits_uhwi_p (srclen)
4125 && tree_int_cst_lt (maxread, srclen)))
4126 srclen = maxread;
4128 /* The number of bytes to write is SRCLEN. */
4129 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4131 return NULL_RTX;
4134 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4135 NULL_RTX if we failed; the caller should emit a normal call. */
4137 static rtx
4138 expand_builtin_strncpy (tree exp, rtx target)
4140 location_t loc = EXPR_LOCATION (exp);
4142 if (validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4145 tree dest = CALL_EXPR_ARG (exp, 0);
4146 tree src = CALL_EXPR_ARG (exp, 1);
4147 /* The number of bytes to write (not the maximum). */
4148 tree len = CALL_EXPR_ARG (exp, 2);
4149 /* The length of the source sequence. */
4150 tree slen = c_strlen (src, 1);
4152 if (warn_stringop_overflow)
4154 tree destsize = compute_objsize (dest,
4155 warn_stringop_overflow - 1);
4157 /* The number of bytes to write is LEN but check_access will also
4158 check SLEN if LEN's value isn't known. */
4159 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4160 destsize);
4163 /* We must be passed constant LEN and SRC parameters. */
4164 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4165 return NULL_RTX;
4167 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4169 /* We're required to pad with trailing zeros if the requested
4170 len is greater than strlen(s2)+1. In that case try to
4171 use store_by_pieces, if it fails, punt. */
4172 if (tree_int_cst_lt (slen, len))
4174 unsigned int dest_align = get_pointer_alignment (dest);
4175 const char *p = c_getstr (src);
4176 rtx dest_mem;
4178 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4179 || !can_store_by_pieces (tree_to_uhwi (len),
4180 builtin_strncpy_read_str,
4181 CONST_CAST (char *, p),
4182 dest_align, false))
4183 return NULL_RTX;
4185 dest_mem = get_memory_rtx (dest, len);
4186 store_by_pieces (dest_mem, tree_to_uhwi (len),
4187 builtin_strncpy_read_str,
4188 CONST_CAST (char *, p), dest_align, false, 0);
4189 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4190 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4191 return dest_mem;
4194 return NULL_RTX;
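/* Illustration of the padding rule handled above (a sketch, not GCC
   code):

     char buf[8];
     __builtin_strncpy (buf, "ab", sizeof buf);

   LEN (8) exceeds strlen ("ab") + 1 (3), so BUF must end up holding
   { 'a', 'b', 0, 0, 0, 0, 0, 0 }; the zero padding makes the whole
   pattern a compile-time constant that store_by_pieces can emit
   directly.  */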
4197 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4198 bytes from constant string DATA + OFFSET and return it as target
4199 constant. */
4201 static rtx
4202 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4203 scalar_int_mode mode)
4205 const char *c = (const char *) data;
4206 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4208 memset (p, *c, GET_MODE_SIZE (mode));
4210 return c_readstr (p, mode);
4213 /* Callback routine for store_by_pieces. Return the RTL of a register
4214 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4215 char value given in the RTL register data. For example, if mode is
4216 4 bytes wide, return the RTL for 0x01010101*data. */
4218 static rtx
4219 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4220 scalar_int_mode mode)
4222 rtx target, coeff;
4223 size_t size;
4224 char *p;
4226 size = GET_MODE_SIZE (mode);
4227 if (size == 1)
4228 return (rtx) data;
4230 p = XALLOCAVEC (char, size);
4231 memset (p, 1, size);
4232 coeff = c_readstr (p, mode);
4234 target = convert_to_mode (mode, (rtx) data, 1);
4235 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4236 return force_reg (mode, target);
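/* The byte-replication trick above, written as plain C for
   illustration (assuming a 4-byte mode; not part of GCC):

     #include <stdint.h>

     static uint32_t
     replicate_byte (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;
     }

   e.g. replicate_byte (0xab) == 0xabababab, which is what the
   expand_mult of DATA by the c_readstr coefficient computes in RTL.  */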
4239 /* Expand expression EXP, which is a call to the memset builtin. Return
4240 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4241 try to get the result in TARGET, if convenient (and in mode MODE if that's
4242 convenient). */
4244 static rtx
4245 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4247 if (!validate_arglist (exp,
4248 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4249 return NULL_RTX;
4251 tree dest = CALL_EXPR_ARG (exp, 0);
4252 tree val = CALL_EXPR_ARG (exp, 1);
4253 tree len = CALL_EXPR_ARG (exp, 2);
4255 check_memop_access (exp, dest, NULL_TREE, len);
4257 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4260 /* Helper function to do the actual work for expand_builtin_memset. The
4261 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4262 so that this can also be called without constructing an actual CALL_EXPR.
4263 The other arguments and return value are the same as for
4264 expand_builtin_memset. */
4266 static rtx
4267 expand_builtin_memset_args (tree dest, tree val, tree len,
4268 rtx target, machine_mode mode, tree orig_exp)
4270 tree fndecl, fn;
4271 enum built_in_function fcode;
4272 machine_mode val_mode;
4273 char c;
4274 unsigned int dest_align;
4275 rtx dest_mem, dest_addr, len_rtx;
4276 HOST_WIDE_INT expected_size = -1;
4277 unsigned int expected_align = 0;
4278 unsigned HOST_WIDE_INT min_size;
4279 unsigned HOST_WIDE_INT max_size;
4280 unsigned HOST_WIDE_INT probable_max_size;
4282 dest_align = get_pointer_alignment (dest);
4284 /* If DEST is not a pointer type, don't do this operation in-line. */
4285 if (dest_align == 0)
4286 return NULL_RTX;
4288 if (currently_expanding_gimple_stmt)
4289 stringop_block_profile (currently_expanding_gimple_stmt,
4290 &expected_align, &expected_size);
4292 if (expected_align < dest_align)
4293 expected_align = dest_align;
4295 /* If the LEN parameter is zero, return DEST. */
4296 if (integer_zerop (len))
4298 /* Evaluate and ignore VAL in case it has side-effects. */
4299 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4300 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4303 /* Stabilize the arguments in case we fail. */
4304 dest = builtin_save_expr (dest);
4305 val = builtin_save_expr (val);
4306 len = builtin_save_expr (len);
4308 len_rtx = expand_normal (len);
4309 determine_block_size (len, len_rtx, &min_size, &max_size,
4310 &probable_max_size);
4311 dest_mem = get_memory_rtx (dest, len);
4312 val_mode = TYPE_MODE (unsigned_char_type_node);
4314 if (TREE_CODE (val) != INTEGER_CST)
4316 rtx val_rtx;
4318 val_rtx = expand_normal (val);
4319 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4321 /* Assume that we can memset by pieces if we can store
4322 the coefficients by pieces (in the required modes).
4323 We can't pass builtin_memset_gen_str as that emits RTL. */
4324 c = 1;
4325 if (tree_fits_uhwi_p (len)
4326 && can_store_by_pieces (tree_to_uhwi (len),
4327 builtin_memset_read_str, &c, dest_align,
4328 true))
4330 val_rtx = force_reg (val_mode, val_rtx);
4331 store_by_pieces (dest_mem, tree_to_uhwi (len),
4332 builtin_memset_gen_str, val_rtx, dest_align,
4333 true, 0);
4335 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4336 dest_align, expected_align,
4337 expected_size, min_size, max_size,
4338 probable_max_size))
4339 goto do_libcall;
4341 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4342 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4343 return dest_mem;
4346 if (target_char_cast (val, &c))
4347 goto do_libcall;
4349 if (c)
4351 if (tree_fits_uhwi_p (len)
4352 && can_store_by_pieces (tree_to_uhwi (len),
4353 builtin_memset_read_str, &c, dest_align,
4354 true))
4355 store_by_pieces (dest_mem, tree_to_uhwi (len),
4356 builtin_memset_read_str, &c, dest_align, true, 0);
4357 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4358 gen_int_mode (c, val_mode),
4359 dest_align, expected_align,
4360 expected_size, min_size, max_size,
4361 probable_max_size))
4362 goto do_libcall;
4364 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4365 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4366 return dest_mem;
4369 set_mem_align (dest_mem, dest_align);
4370 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4371 CALL_EXPR_TAILCALL (orig_exp)
4372 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4373 expected_align, expected_size,
4374 min_size, max_size,
4375 probable_max_size);
4377 if (dest_addr == 0)
4379 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4380 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4383 return dest_addr;
4385 do_libcall:
4386 fndecl = get_callee_fndecl (orig_exp);
4387 fcode = DECL_FUNCTION_CODE (fndecl);
4388 if (fcode == BUILT_IN_MEMSET)
4389 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4390 dest, val, len);
4391 else if (fcode == BUILT_IN_BZERO)
4392 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4393 dest, len);
4394 else
4395 gcc_unreachable ();
4396 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4397 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4398 return expand_call (fn, target, target == const0_rtx);
4401 /* Expand expression EXP, which is a call to the bzero builtin. Return
4402 NULL_RTX if we failed; the caller should emit a normal call. */
4404 static rtx
4405 expand_builtin_bzero (tree exp)
4407 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4408 return NULL_RTX;
4410 tree dest = CALL_EXPR_ARG (exp, 0);
4411 tree size = CALL_EXPR_ARG (exp, 1);
4413 check_memop_access (exp, dest, NULL_TREE, size);
4415 /* New argument list transforming bzero(ptr x, int y) to
4416 memset(ptr x, int 0, size_t y). This is done this way
4417 so that if it isn't expanded inline, we fall back to
4418 calling bzero instead of memset. */
4420 location_t loc = EXPR_LOCATION (exp);
4422 return expand_builtin_memset_args (dest, integer_zero_node,
4423 fold_convert_loc (loc,
4424 size_type_node, size),
4425 const0_rtx, VOIDmode, exp);
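/* Usage note (illustrative): a call such as

     __builtin_bzero (p, n);

   is expanded here exactly like

     __builtin_memset (p, 0, (size_t) n);

   but ORIG_EXP remains the bzero call, so if inline expansion fails
   the fallback library call is to bzero, not memset.  */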
4428 /* Try to expand cmpstr operation ICODE with the given operands.
4429 Return the result rtx on success, otherwise return null. */
4431 static rtx
4432 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4433 HOST_WIDE_INT align)
4435 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4437 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4438 target = NULL_RTX;
4440 struct expand_operand ops[4];
4441 create_output_operand (&ops[0], target, insn_mode);
4442 create_fixed_operand (&ops[1], arg1_rtx);
4443 create_fixed_operand (&ops[2], arg2_rtx);
4444 create_integer_operand (&ops[3], align);
4445 if (maybe_expand_insn (icode, 4, ops))
4446 return ops[0].value;
4447 return NULL_RTX;
4450 /* Expand expression EXP, which is a call to the memcmp built-in function.
4451 Return NULL_RTX if we failed and the caller should emit a normal call,
4452 otherwise try to get the result in TARGET, if convenient.
4453 RESULT_EQ is true if we can relax the returned value to be either zero
4454 or nonzero, without caring about the sign. */
4456 static rtx
4457 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4459 if (!validate_arglist (exp,
4460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4461 return NULL_RTX;
4463 tree arg1 = CALL_EXPR_ARG (exp, 0);
4464 tree arg2 = CALL_EXPR_ARG (exp, 1);
4465 tree len = CALL_EXPR_ARG (exp, 2);
4466 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4467 bool no_overflow = true;
4469 /* Diagnose calls where the specified length exceeds the size of either
4470 object. */
4471 tree size = compute_objsize (arg1, 0);
4472 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4473 len, /*maxread=*/NULL_TREE, size,
4474 /*objsize=*/NULL_TREE);
4475 if (no_overflow)
4477 size = compute_objsize (arg2, 0);
4478 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4479 len, /*maxread=*/NULL_TREE, size,
4480 /*objsize=*/NULL_TREE);
4483 /* Due to the performance benefit, always inline the calls first
4484 when result_eq is false. */
4485 rtx result = NULL_RTX;
4487 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4489 result = inline_expand_builtin_string_cmp (exp, target);
4490 if (result)
4491 return result;
4494 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4495 location_t loc = EXPR_LOCATION (exp);
4497 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4498 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4500 /* If we don't have POINTER_TYPE, call the function. */
4501 if (arg1_align == 0 || arg2_align == 0)
4502 return NULL_RTX;
4504 rtx arg1_rtx = get_memory_rtx (arg1, len);
4505 rtx arg2_rtx = get_memory_rtx (arg2, len);
4506 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4508 /* Set MEM_SIZE as appropriate. */
4509 if (CONST_INT_P (len_rtx))
4511 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4512 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4515 by_pieces_constfn constfn = NULL;
4517 const char *src_str = c_getstr (arg2);
4518 if (result_eq && src_str == NULL)
4520 src_str = c_getstr (arg1);
4521 if (src_str != NULL)
4522 std::swap (arg1_rtx, arg2_rtx);
4525 /* If SRC is a string constant and the block comparison would be done
4526 by pieces, we can avoid loading the string from memory
4527 and instead use the computed constants. */
4528 if (src_str
4529 && CONST_INT_P (len_rtx)
4530 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4531 constfn = builtin_memcpy_read_str;
4533 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4534 TREE_TYPE (len), target,
4535 result_eq, constfn,
4536 CONST_CAST (char *, src_str));
4538 if (result)
4540 /* Return the value in the proper mode for this function. */
4541 if (GET_MODE (result) == mode)
4542 return result;
4544 if (target != 0)
4546 convert_move (target, result, 0);
4547 return target;
4550 return convert_to_mode (mode, result, 0);
4553 return NULL_RTX;
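/* Note on RESULT_EQ (illustrative): in a use such as

     if (__builtin_memcmp (a, b, n) == 0) ...

   only the zero/nonzero distinction matters, so the expander may pick
   a cheaper block comparison than one that must also produce a
   correctly signed result for ordering.  */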
4556 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4557 if we failed; the caller should emit a normal call, otherwise try to get
4558 the result in TARGET, if convenient. */
4560 static rtx
4561 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4563 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4564 return NULL_RTX;
4566 /* Due to the performance benefit, always inline the calls first. */
4567 rtx result = NULL_RTX;
4568 result = inline_expand_builtin_string_cmp (exp, target);
4569 if (result)
4570 return result;
4572 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4573 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4574 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4575 return NULL_RTX;
4577 tree arg1 = CALL_EXPR_ARG (exp, 0);
4578 tree arg2 = CALL_EXPR_ARG (exp, 1);
4580 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4581 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4583 /* If we don't have POINTER_TYPE, call the function. */
4584 if (arg1_align == 0 || arg2_align == 0)
4585 return NULL_RTX;
4587 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4588 arg1 = builtin_save_expr (arg1);
4589 arg2 = builtin_save_expr (arg2);
4591 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4592 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4594 /* Try to call cmpstrsi. */
4595 if (cmpstr_icode != CODE_FOR_nothing)
4596 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4597 MIN (arg1_align, arg2_align));
4599 /* Try to determine at least one length and call cmpstrnsi. */
4600 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4602 tree len;
4603 rtx arg3_rtx;
4605 tree len1 = c_strlen (arg1, 1);
4606 tree len2 = c_strlen (arg2, 1);
4608 if (len1)
4609 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4610 if (len2)
4611 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4613 /* If we don't have a constant length for the first, use the length
4614 of the second, if we know it. We don't require a constant for
4615 this case; some cost analysis could be done if both are available
4616 but neither is constant. For now, assume they're equally cheap,
4617 unless one has side effects. If both strings have constant lengths,
4618 use the smaller. */
4620 if (!len1)
4621 len = len2;
4622 else if (!len2)
4623 len = len1;
4624 else if (TREE_SIDE_EFFECTS (len1))
4625 len = len2;
4626 else if (TREE_SIDE_EFFECTS (len2))
4627 len = len1;
4628 else if (TREE_CODE (len1) != INTEGER_CST)
4629 len = len2;
4630 else if (TREE_CODE (len2) != INTEGER_CST)
4631 len = len1;
4632 else if (tree_int_cst_lt (len1, len2))
4633 len = len1;
4634 else
4635 len = len2;
4637 /* If both arguments have side effects, we cannot optimize. */
4638 if (len && !TREE_SIDE_EFFECTS (len))
4640 arg3_rtx = expand_normal (len);
4641 result = expand_cmpstrn_or_cmpmem
4642 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4643 arg3_rtx, MIN (arg1_align, arg2_align));
4647 tree fndecl = get_callee_fndecl (exp);
4648 if (result)
4650 /* Check to see if the argument was declared attribute nonstring
4651 and if so, issue a warning since at this point it's not known
4652 to be nul-terminated. */
4653 maybe_warn_nonstring_arg (fndecl, exp);
4655 /* Return the value in the proper mode for this function. */
4656 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4657 if (GET_MODE (result) == mode)
4658 return result;
4659 if (target == 0)
4660 return convert_to_mode (mode, result, 0);
4661 convert_move (target, result, 0);
4662 return target;
4665 /* Expand the library call ourselves using a stabilized argument
4666 list to avoid evaluating the function's arguments twice. */
4667 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4668 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4669 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4670 return expand_call (fn, target, target == const0_rtx);
4673 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4674 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4675 the result in TARGET, if convenient. */
4677 static rtx
4678 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4679 ATTRIBUTE_UNUSED machine_mode mode)
4681 if (!validate_arglist (exp,
4682 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4683 return NULL_RTX;
4685 /* Due to the performance benefit, always inline the calls first. */
4686 rtx result = NULL_RTX;
4687 result = inline_expand_builtin_string_cmp (exp, target);
4688 if (result)
4689 return result;
4691 /* If c_strlen can determine an expression for one of the string
4692 lengths, and it doesn't have side effects, then emit cmpstrnsi
4693 using length MIN(strlen(string)+1, arg3). */
4694 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4695 if (cmpstrn_icode == CODE_FOR_nothing)
4696 return NULL_RTX;
4698 tree len;
4700 tree arg1 = CALL_EXPR_ARG (exp, 0);
4701 tree arg2 = CALL_EXPR_ARG (exp, 1);
4702 tree arg3 = CALL_EXPR_ARG (exp, 2);
4704 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4705 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4707 tree len1 = c_strlen (arg1, 1);
4708 tree len2 = c_strlen (arg2, 1);
4710 location_t loc = EXPR_LOCATION (exp);
4712 if (len1)
4713 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4714 if (len2)
4715 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4717 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4719 /* If we don't have a constant length for the first, use the length
4720 of the second, if we know it. If neither string is constant length,
4721 use the given length argument. We don't require a constant for
4722 this case; some cost analysis could be done if both are available
4723 but neither is constant. For now, assume they're equally cheap,
4724 unless one has side effects. If both strings have constant lengths,
4725 use the smaller. */
4727 if (!len1 && !len2)
4728 len = len3;
4729 else if (!len1)
4730 len = len2;
4731 else if (!len2)
4732 len = len1;
4733 else if (TREE_SIDE_EFFECTS (len1))
4734 len = len2;
4735 else if (TREE_SIDE_EFFECTS (len2))
4736 len = len1;
4737 else if (TREE_CODE (len1) != INTEGER_CST)
4738 len = len2;
4739 else if (TREE_CODE (len2) != INTEGER_CST)
4740 len = len1;
4741 else if (tree_int_cst_lt (len1, len2))
4742 len = len1;
4743 else
4744 len = len2;
4746 /* If we are not using the given length, we must incorporate it here.
4747 The actual new length parameter will be MIN(len,arg3) in this case. */
4748 if (len != len3)
4749 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4750 rtx arg1_rtx = get_memory_rtx (arg1, len);
4751 rtx arg2_rtx = get_memory_rtx (arg2, len);
4752 rtx arg3_rtx = expand_normal (len);
4753 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4754 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4755 MIN (arg1_align, arg2_align));
4757 tree fndecl = get_callee_fndecl (exp);
4758 if (result)
4760 /* Check to see if the argument was declared attribute nonstring
4761 and if so, issue a warning since at this point it's not known
4762 to be nul-terminated. */
4763 maybe_warn_nonstring_arg (fndecl, exp);
4765 /* Return the value in the proper mode for this function. */
4766 mode = TYPE_MODE (TREE_TYPE (exp));
4767 if (GET_MODE (result) == mode)
4768 return result;
4769 if (target == 0)
4770 return convert_to_mode (mode, result, 0);
4771 convert_move (target, result, 0);
4772 return target;
4775 /* Expand the library call ourselves using a stabilized argument
4776 list to avoid evaluating the function's arguments twice. */
4777 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4778 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4779 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4780 return expand_call (fn, target, target == const0_rtx);
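/* Example of the length selection above (illustrative): for

     __builtin_strncmp (s, "abc", 100)

   LEN2 is strlen ("abc") + 1 == 4, so the comparison is emitted with
   MIN (4, 100) == 4 bytes; bytes past the nul of the constant string
   cannot affect the result.  */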
4783 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4784 if that's convenient. */
4786 rtx
4787 expand_builtin_saveregs (void)
4789 rtx val;
4790 rtx_insn *seq;
4792 /* Don't do __builtin_saveregs more than once in a function.
4793 Save the result of the first call and reuse it. */
4794 if (saveregs_value != 0)
4795 return saveregs_value;
4797 /* When this function is called, it means that registers must be
4798 saved on entry to this function. So we migrate the call to the
4799 first insn of this function. */
4801 start_sequence ();
4803 /* Do whatever the machine needs done in this case. */
4804 val = targetm.calls.expand_builtin_saveregs ();
4806 seq = get_insns ();
4807 end_sequence ();
4809 saveregs_value = val;
4811 /* Put the insns after the NOTE that starts the function. If this
4812 is inside a start_sequence, make the outer-level insn chain current, so
4813 the code is placed at the start of the function. */
4814 push_topmost_sequence ();
4815 emit_insn_after (seq, entry_of_function ());
4816 pop_topmost_sequence ();
4818 return val;
4821 /* Expand a call to __builtin_next_arg. */
4823 static rtx
4824 expand_builtin_next_arg (void)
4826 /* Checking arguments is already done in fold_builtin_next_arg
4827 that must be called before this function. */
4828 return expand_binop (ptr_mode, add_optab,
4829 crtl->args.internal_arg_pointer,
4830 crtl->args.arg_offset_rtx,
4831 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4834 /* Make it easier for the backends by protecting the valist argument
4835 from multiple evaluations. */
4837 static tree
4838 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4840 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4842 /* The current way of determining the type of valist is completely
4843 bogus. We should have the information on the va builtin instead. */
4844 if (!vatype)
4845 vatype = targetm.fn_abi_va_list (cfun->decl);
4847 if (TREE_CODE (vatype) == ARRAY_TYPE)
4849 if (TREE_SIDE_EFFECTS (valist))
4850 valist = save_expr (valist);
4852 /* For this case, the backends will be expecting a pointer to
4853 vatype, but it's possible we've actually been given an array
4854 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4855 So fix it. */
4856 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4858 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4859 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4862 else
4864 tree pt = build_pointer_type (vatype);
4866 if (! needs_lvalue)
4868 if (! TREE_SIDE_EFFECTS (valist))
4869 return valist;
4871 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4872 TREE_SIDE_EFFECTS (valist) = 1;
4875 if (TREE_SIDE_EFFECTS (valist))
4876 valist = save_expr (valist);
4877 valist = fold_build2_loc (loc, MEM_REF,
4878 vatype, valist, build_int_cst (pt, 0));
4881 return valist;
4884 /* The "standard" definition of va_list is void*. */
4886 tree
4887 std_build_builtin_va_list (void)
4889 return ptr_type_node;
4892 /* The "standard" abi va_list is va_list_type_node. */
4894 tree
4895 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4897 return va_list_type_node;
4900 /* The "standard" type of va_list is va_list_type_node. */
4902 tree
4903 std_canonical_va_list_type (tree type)
4905 tree wtype, htype;
4907 wtype = va_list_type_node;
4908 htype = type;
4910 if (TREE_CODE (wtype) == ARRAY_TYPE)
4912 /* If va_list is an array type, the argument may have decayed
4913 to a pointer type, e.g. by being passed to another function.
4914 In that case, unwrap both types so that we can compare the
4915 underlying records. */
4916 if (TREE_CODE (htype) == ARRAY_TYPE
4917 || POINTER_TYPE_P (htype))
4919 wtype = TREE_TYPE (wtype);
4920 htype = TREE_TYPE (htype);
4923 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4924 return va_list_type_node;
4926 return NULL_TREE;
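/* Illustration of the decay case above (a sketch; an array-typed
   va_list is target-specific, e.g. the x86-64 psABI, and the names
   below are hypothetical):

     typedef struct tag __va_list[1];   array-type va_list

     void g (__va_list ap);             AP decays to struct tag *

   Both the array type and its decayed pointer type must canonicalize
   to va_list_type_node, hence both are unwrapped before the
   main-variant comparison.  */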
4929 /* The "standard" implementation of va_start: just assign `nextarg' to
4930 the variable. */
4932 void
4933 std_expand_builtin_va_start (tree valist, rtx nextarg)
4935 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4936 convert_move (va_r, nextarg, 0);
4939 /* Expand EXP, a call to __builtin_va_start. */
4941 static rtx
4942 expand_builtin_va_start (tree exp)
4944 rtx nextarg;
4945 tree valist;
4946 location_t loc = EXPR_LOCATION (exp);
4948 if (call_expr_nargs (exp) < 2)
4950 error_at (loc, "too few arguments to function %<va_start%>");
4951 return const0_rtx;
4954 if (fold_builtin_next_arg (exp, true))
4955 return const0_rtx;
4957 nextarg = expand_builtin_next_arg ();
4958 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4960 if (targetm.expand_builtin_va_start)
4961 targetm.expand_builtin_va_start (valist, nextarg);
4962 else
4963 std_expand_builtin_va_start (valist, nextarg);
4965 return const0_rtx;
4968 /* Expand EXP, a call to __builtin_va_end. */
4970 static rtx
4971 expand_builtin_va_end (tree exp)
4973 tree valist = CALL_EXPR_ARG (exp, 0);
4975 /* Evaluate for side effects, if needed. I hate macros that don't
4976 do that. */
4977 if (TREE_SIDE_EFFECTS (valist))
4978 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4980 return const0_rtx;
4983 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4984 builtin rather than just as an assignment in stdarg.h because of the
4985 nastiness of array-type va_list types. */
4987 static rtx
4988 expand_builtin_va_copy (tree exp)
4990 tree dst, src, t;
4991 location_t loc = EXPR_LOCATION (exp);
4993 dst = CALL_EXPR_ARG (exp, 0);
4994 src = CALL_EXPR_ARG (exp, 1);
4996 dst = stabilize_va_list_loc (loc, dst, 1);
4997 src = stabilize_va_list_loc (loc, src, 0);
4999 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5001 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5003 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5004 TREE_SIDE_EFFECTS (t) = 1;
5005 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5007 else
5009 rtx dstb, srcb, size;
5011 /* Evaluate to pointers. */
5012 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5013 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5014 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5015 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5017 dstb = convert_memory_address (Pmode, dstb);
5018 srcb = convert_memory_address (Pmode, srcb);
5020 /* "Dereference" to BLKmode memories. */
5021 dstb = gen_rtx_MEM (BLKmode, dstb);
5022 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5023 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5024 srcb = gen_rtx_MEM (BLKmode, srcb);
5025 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5026 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5028 /* Copy. */
5029 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5032 return const0_rtx;
5035 /* Expand a call to one of the builtin functions __builtin_frame_address or
5036 __builtin_return_address. */
5038 static rtx
5039 expand_builtin_frame_address (tree fndecl, tree exp)
5041 /* The argument must be a nonnegative integer constant.
5042 It counts the number of frames to scan up the stack.
5043 The value is either the frame pointer value or the return
5044 address saved in that frame. */
5045 if (call_expr_nargs (exp) == 0)
5046 /* Warning about missing arg was already issued. */
5047 return const0_rtx;
5048 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5050 error ("invalid argument to %qD", fndecl);
5051 return const0_rtx;
5053 else
5055 /* Number of frames to scan up the stack. */
5056 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5058 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5060 /* Some ports cannot access arbitrary stack frames. */
5061 if (tem == NULL)
5063 warning (0, "unsupported argument to %qD", fndecl);
5064 return const0_rtx;
5067 if (count)
5069 /* Warn since no effort is made to ensure that any frame
5070 beyond the current one exists or can be safely reached. */
5071 warning (OPT_Wframe_address, "calling %qD with "
5072 "a nonzero argument is unsafe", fndecl);
5075 /* For __builtin_frame_address, return what we've got. */
5076 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5077 return tem;
5079 if (!REG_P (tem)
5080 && ! CONSTANT_P (tem))
5081 tem = copy_addr_to_reg (tem);
5082 return tem;
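/* Example of the warning above (illustrative):

     void *f (void) { return __builtin_return_address (1); }

   COUNT is nonzero, so -Wframe-address warns: nothing ensures that
   the caller's frame still exists or can be accessed safely on every
   target.  */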
5086 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5087 failed and the caller should emit a normal call. */
5089 static rtx
5090 expand_builtin_alloca (tree exp)
5092 rtx op0;
5093 rtx result;
5094 unsigned int align;
5095 tree fndecl = get_callee_fndecl (exp);
5096 HOST_WIDE_INT max_size;
5097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5098 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5099 bool valid_arglist
5100 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5101 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5102 VOID_TYPE)
5103 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5104 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5105 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5107 if (!valid_arglist)
5108 return NULL_RTX;
5110 if ((alloca_for_var
5111 && warn_vla_limit >= HOST_WIDE_INT_MAX
5112 && warn_alloc_size_limit < warn_vla_limit)
5113 || (!alloca_for_var
5114 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5115 && warn_alloc_size_limit < warn_alloca_limit
5118 /* -Walloca-larger-than and -Wvla-larger-than settings of
5119 less than HOST_WIDE_INT_MAX override the more general
5120 -Walloc-size-larger-than so unless either of the former
5121 options is smaller than the last one (which would imply
5122 that the call was already checked), check the alloca
5123 arguments for overflow. */
5124 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5125 int idx[] = { 0, -1 };
5126 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5129 /* Compute the argument. */
5130 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5132 /* Compute the alignment. */
5133 align = (fcode == BUILT_IN_ALLOCA
5134 ? BIGGEST_ALIGNMENT
5135 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5137 /* Compute the maximum size. */
5138 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5139 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5140 : -1);
5142 /* Allocate the desired space. If the allocation stems from the declaration
5143 of a variable-sized object, it cannot accumulate. */
5144 result
5145 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5146 result = convert_memory_address (ptr_mode, result);
5148 return result;
5151 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5152 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5153 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5154 in the comment for the handle_builtin_stack_restore function. */
5156 static rtx
5157 expand_asan_emit_allocas_unpoison (tree exp)
5159 tree arg0 = CALL_EXPR_ARG (exp, 0);
5160 tree arg1 = CALL_EXPR_ARG (exp, 1);
5161 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5162 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5163 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5164 stack_pointer_rtx, NULL_RTX, 0,
5165 OPTAB_LIB_WIDEN);
5166 off = convert_modes (ptr_mode, Pmode, off, 0);
5167 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5168 OPTAB_LIB_WIDEN);
5169 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5170 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5171 top, ptr_mode, bot, ptr_mode);
5172 return ret;
5175 /* Expand a call to bswap builtin in EXP.
5176 Return NULL_RTX if a normal call should be emitted rather than expanding the
5177 function in-line. If convenient, the result should be placed in TARGET.
5178 SUBTARGET may be used as the target for computing one of EXP's operands. */
5180 static rtx
5181 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5182 rtx subtarget)
5184 tree arg;
5185 rtx op0;
5187 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5188 return NULL_RTX;
5190 arg = CALL_EXPR_ARG (exp, 0);
5191 op0 = expand_expr (arg,
5192 subtarget && GET_MODE (subtarget) == target_mode
5193 ? subtarget : NULL_RTX,
5194 target_mode, EXPAND_NORMAL);
5195 if (GET_MODE (op0) != target_mode)
5196 op0 = convert_to_mode (target_mode, op0, 1);
5198 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5200 gcc_assert (target);
5202 return convert_to_mode (target_mode, target, 1);
5205 /* Expand a call to a unary builtin in EXP.
5206 Return NULL_RTX if a normal call should be emitted rather than expanding the
5207 function in-line. If convenient, the result should be placed in TARGET.
5208 SUBTARGET may be used as the target for computing one of EXP's operands. */
5210 static rtx
5211 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5212 rtx subtarget, optab op_optab)
5214 rtx op0;
5216 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5217 return NULL_RTX;
5219 /* Compute the argument. */
5220 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5221 (subtarget
5222 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5223 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5224 VOIDmode, EXPAND_NORMAL);
5225 /* Compute op, into TARGET if possible.
5226 Set TARGET to wherever the result comes back. */
5227 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5228 op_optab, op0, target, op_optab != clrsb_optab);
5229 gcc_assert (target);
5231 return convert_to_mode (target_mode, target, 0);
5234 /* Expand a call to __builtin_expect. We just return our argument
5235 as the builtin_expect semantics should have already been applied by
5236 the tree branch prediction pass. */
5238 static rtx
5239 expand_builtin_expect (tree exp, rtx target)
5241 tree arg;
5243 if (call_expr_nargs (exp) < 2)
5244 return const0_rtx;
5245 arg = CALL_EXPR_ARG (exp, 0);
5247 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5248 /* When guessing was done, the hints should be already stripped away. */
5249 gcc_assert (!flag_guess_branch_prob
5250 || optimize == 0 || seen_error ());
5251 return target;
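/* Usage note (illustrative): by the time expansion runs,

     if (__builtin_expect (x != 0, 0)) ...

   has already served its purpose; the tree-level branch predictors
   recorded the hint, so only the first argument X is expanded here.  */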
5254 /* Expand a call to __builtin_assume_aligned.  We just return our first
5255 argument, as the builtin_assume_aligned semantics should already have
5256 been acted on by CCP. */
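/* For reference, a source-level sketch of what CCP has already acted on
   by this point (q is a hypothetical pointer whose 16-byte alignment
   the programmer asserts):

     double *p = __builtin_assume_aligned (q, 16);
     // accesses through p may now be optimized assuming 16-byte alignment
*/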
5258 static rtx
5259 expand_builtin_assume_aligned (tree exp, rtx target)
5261 if (call_expr_nargs (exp) < 2)
5262 return const0_rtx;
5263 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5264 EXPAND_NORMAL);
5265 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5266 && (call_expr_nargs (exp) < 3
5267 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5268 return target;
5271 void
5272 expand_builtin_trap (void)
5274 if (targetm.have_trap ())
5276 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5277 /* For trap insns when not accumulating outgoing args force
5278 REG_ARGS_SIZE note to prevent crossjumping of calls with
5279 different args sizes. */
5280 if (!ACCUMULATE_OUTGOING_ARGS)
5281 add_args_size_note (insn, stack_pointer_delta);
5283 else
5285 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5286 tree call_expr = build_call_expr (fn, 0);
5287 expand_call (call_expr, NULL_RTX, false);
5290 emit_barrier ();
5293 /* Expand a call to __builtin_unreachable. We do nothing except emit
5294 a barrier saying that control flow will not pass here.
5296 It is the responsibility of the program being compiled to ensure
5297 that control flow never reaches __builtin_unreachable. */
5298 static void
5299 expand_builtin_unreachable (void)
5301 emit_barrier ();
5304 /* Expand EXP, a call to fabs, fabsf or fabsl.
5305 Return NULL_RTX if a normal call should be emitted rather than expanding
5306 the function inline. If convenient, the result should be placed
5307 in TARGET. SUBTARGET may be used as the target for computing
5308 the operand. */
5310 static rtx
5311 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5313 machine_mode mode;
5314 tree arg;
5315 rtx op0;
5317 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5318 return NULL_RTX;
5320 arg = CALL_EXPR_ARG (exp, 0);
5321 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5322 mode = TYPE_MODE (TREE_TYPE (arg));
5323 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5324 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5327 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5328 Return NULL if a normal call should be emitted rather than expanding the
5329 function inline. If convenient, the result should be placed in TARGET.
5330 SUBTARGET may be used as the target for computing the operand. */
5332 static rtx
5333 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5335 rtx op0, op1;
5336 tree arg;
5338 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5339 return NULL_RTX;
5341 arg = CALL_EXPR_ARG (exp, 0);
5342 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5344 arg = CALL_EXPR_ARG (exp, 1);
5345 op1 = expand_normal (arg);
5347 return expand_copysign (op0, op1, target);
5350 /* Expand a call to __builtin___clear_cache. */
5352 static rtx
5353 expand_builtin___clear_cache (tree exp)
5355 if (!targetm.code_for_clear_cache)
5357 #ifdef CLEAR_INSN_CACHE
5358 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5359 does something. Just do the default expansion to a call to
5360 __clear_cache(). */
5361 return NULL_RTX;
5362 #else
5363 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5364 does nothing. There is no need to call it. Do nothing. */
5365 return const0_rtx;
5366 #endif /* CLEAR_INSN_CACHE */
5369 /* We have a "clear_cache" insn, and it will handle everything. */
5370 tree begin, end;
5371 rtx begin_rtx, end_rtx;
5373 /* We must not expand to a library call. If we did, any
5374 fallback library function in libgcc that might contain a call to
5375 __builtin___clear_cache() would recurse infinitely. */
5376 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5378 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5379 return const0_rtx;
5382 if (targetm.have_clear_cache ())
5384 struct expand_operand ops[2];
5386 begin = CALL_EXPR_ARG (exp, 0);
5387 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5389 end = CALL_EXPR_ARG (exp, 1);
5390 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5392 create_address_operand (&ops[0], begin_rtx);
5393 create_address_operand (&ops[1], end_rtx);
5394 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5395 return const0_rtx;
5397 return const0_rtx;
5400 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
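/* The rounding below is the usual align-up idiom.  For example, with a
   hypothetical TRAMPOLINE_ALIGNMENT of 256 bits (32 bytes), ADDEND is
   31 and MASK is -32, so the emitted PLUS and AND compute:

     addr = (addr + 31) & ~(uintptr_t) 31;   // round up to 32 bytes
*/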
5402 static rtx
5403 round_trampoline_addr (rtx tramp)
5405 rtx temp, addend, mask;
5407 /* If we don't need too much alignment, we'll have been guaranteed
5408 proper alignment by get_trampoline_type. */
5409 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5410 return tramp;
5412 /* Round address up to desired boundary. */
5413 temp = gen_reg_rtx (Pmode);
5414 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5415 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5417 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5418 temp, 0, OPTAB_LIB_WIDEN);
5419 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5420 temp, 0, OPTAB_LIB_WIDEN);
5422 return tramp;
5425 static rtx
5426 expand_builtin_init_trampoline (tree exp, bool onstack)
5428 tree t_tramp, t_func, t_chain;
5429 rtx m_tramp, r_tramp, r_chain, tmp;
5431 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5432 POINTER_TYPE, VOID_TYPE))
5433 return NULL_RTX;
5435 t_tramp = CALL_EXPR_ARG (exp, 0);
5436 t_func = CALL_EXPR_ARG (exp, 1);
5437 t_chain = CALL_EXPR_ARG (exp, 2);
5439 r_tramp = expand_normal (t_tramp);
5440 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5441 MEM_NOTRAP_P (m_tramp) = 1;
5443 /* If ONSTACK, the TRAMP argument should be the address of a field
5444 within the local function's FRAME decl. Either way, let's see if
5445 we can fill in the MEM_ATTRs for this memory. */
5446 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5447 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5449 /* Creator of a heap trampoline is responsible for making sure the
5450 address is aligned to at least STACK_BOUNDARY. Normally malloc
5451 will ensure this anyhow. */
5452 tmp = round_trampoline_addr (r_tramp);
5453 if (tmp != r_tramp)
5455 m_tramp = change_address (m_tramp, BLKmode, tmp);
5456 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5457 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5460 /* The FUNC argument should be the address of the nested function.
5461 Extract the actual function decl to pass to the hook. */
5462 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5463 t_func = TREE_OPERAND (t_func, 0);
5464 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5466 r_chain = expand_normal (t_chain);
5468 /* Generate insns to initialize the trampoline. */
5469 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5471 if (onstack)
5473 trampolines_created = 1;
5475 if (targetm.calls.custom_function_descriptors != 0)
5476 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5477 "trampoline generated for nested function %qD", t_func);
5480 return const0_rtx;
5483 static rtx
5484 expand_builtin_adjust_trampoline (tree exp)
5486 rtx tramp;
5488 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5489 return NULL_RTX;
5491 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5492 tramp = round_trampoline_addr (tramp);
5493 if (targetm.calls.trampoline_adjust_address)
5494 tramp = targetm.calls.trampoline_adjust_address (tramp);
5496 return tramp;
5499 /* Expand a call to the builtin descriptor initialization routine.
5500 A descriptor is made up of a couple of pointers to the static
5501 chain and the code entry in this order. */
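/* Concretely, on a hypothetical LP64 target the descriptor written
   below has this layout (the static chain is stored first):

     struct descriptor { void *chain; void *entry; };
     // bytes 0..7  : static chain value   (offset 0)
     // bytes 8..15 : code entry point     (offset POINTER_SIZE / BITS_PER_UNIT)
*/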
5503 static rtx
5504 expand_builtin_init_descriptor (tree exp)
5506 tree t_descr, t_func, t_chain;
5507 rtx m_descr, r_descr, r_func, r_chain;
5509 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5510 VOID_TYPE))
5511 return NULL_RTX;
5513 t_descr = CALL_EXPR_ARG (exp, 0);
5514 t_func = CALL_EXPR_ARG (exp, 1);
5515 t_chain = CALL_EXPR_ARG (exp, 2);
5517 r_descr = expand_normal (t_descr);
5518 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5519 MEM_NOTRAP_P (m_descr) = 1;
5521 r_func = expand_normal (t_func);
5522 r_chain = expand_normal (t_chain);
5524 /* Generate insns to initialize the descriptor. */
5525 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5526 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5527 POINTER_SIZE / BITS_PER_UNIT), r_func);
5529 return const0_rtx;
5532 /* Expand a call to the builtin descriptor adjustment routine. */
5534 static rtx
5535 expand_builtin_adjust_descriptor (tree exp)
5537 rtx tramp;
5539 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5540 return NULL_RTX;
5542 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5544 /* Unalign the descriptor to allow runtime identification. */
5545 tramp = plus_constant (ptr_mode, tramp,
5546 targetm.calls.custom_function_descriptors);
5548 return force_operand (tramp, NULL_RTX);
5551 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5552 function. The function first checks whether the back end provides
5553 an insn to implement signbit for the respective mode. If not, it
5554 checks whether the floating point format of the value is such that
5555 the sign bit can be extracted. If that is not the case, error out.
5556 EXP is the expression that is a call to the builtin function; if
5557 convenient, the result should be placed in TARGET. */
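/* For reference, the source-level behavior being implemented:

     signbit (-3.5)  // nonzero
     signbit (1.0)   // zero
     signbit (-0.0)  // nonzero for IEEE formats, although -0.0 == 0.0

   The -0.0 case is why the expansion must inspect the representation
   rather than simply comparing against zero.  */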
5558 static rtx
5559 expand_builtin_signbit (tree exp, rtx target)
5561 const struct real_format *fmt;
5562 scalar_float_mode fmode;
5563 scalar_int_mode rmode, imode;
5564 tree arg;
5565 int word, bitpos;
5566 enum insn_code icode;
5567 rtx temp;
5568 location_t loc = EXPR_LOCATION (exp);
5570 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5571 return NULL_RTX;
5573 arg = CALL_EXPR_ARG (exp, 0);
5574 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5575 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5576 fmt = REAL_MODE_FORMAT (fmode);
5578 arg = builtin_save_expr (arg);
5580 /* Expand the argument yielding a RTX expression. */
5581 temp = expand_normal (arg);
5583 /* Check if the back end provides an insn that handles signbit for the
5584 argument's mode. */
5585 icode = optab_handler (signbit_optab, fmode);
5586 if (icode != CODE_FOR_nothing)
5588 rtx_insn *last = get_last_insn ();
5589 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5590 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5591 return target;
5592 delete_insns_since (last);
5595 /* For floating point formats without a sign bit, implement signbit
5596 as "ARG < 0.0". */
5597 bitpos = fmt->signbit_ro;
5598 if (bitpos < 0)
5600 /* But we can't do this if the format supports signed zero. */
5601 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5603 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5604 build_real (TREE_TYPE (arg), dconst0));
5605 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5608 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5610 imode = int_mode_for_mode (fmode).require ();
5611 temp = gen_lowpart (imode, temp);
5613 else
5615 imode = word_mode;
5616 /* Handle targets with different FP word orders. */
5617 if (FLOAT_WORDS_BIG_ENDIAN)
5618 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5619 else
5620 word = bitpos / BITS_PER_WORD;
5621 temp = operand_subword_force (temp, word, fmode);
5622 bitpos = bitpos % BITS_PER_WORD;
5625 /* Force the intermediate word_mode (or narrower) result into a
5626 register. This avoids attempting to create paradoxical SUBREGs
5627 of floating point modes below. */
5628 temp = force_reg (imode, temp);
5630 /* If the bitpos is within the "result mode" lowpart, the operation
5631 can be implemented with a single bitwise AND. Otherwise, we need
5632 a right shift and an AND. */
5634 if (bitpos < GET_MODE_BITSIZE (rmode))
5636 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5638 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5639 temp = gen_lowpart (rmode, temp);
5640 temp = expand_binop (rmode, and_optab, temp,
5641 immed_wide_int_const (mask, rmode),
5642 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5644 else
5646 /* Perform a logical right shift to place the signbit in the least
5647 significant bit, then truncate the result to the desired mode
5648 and mask just this bit. */
5649 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5650 temp = gen_lowpart (rmode, temp);
5651 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5652 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5655 return temp;
5658 /* Expand fork or exec calls. TARGET is the desired target of the
5659 call. EXP is the call. FN is the
5660 identifier of the actual function. IGNORE is nonzero if the
5661 value is to be ignored. */
5663 static rtx
5664 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5666 tree id, decl;
5667 tree call;
5669 /* If we are not profiling, just call the function. */
5670 if (!profile_arc_flag)
5671 return NULL_RTX;
5673 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5674 compiler, so the code does not diverge, and the wrapper may run the
5675 code necessary for keeping the profiling sane. */
5677 switch (DECL_FUNCTION_CODE (fn))
5679 case BUILT_IN_FORK:
5680 id = get_identifier ("__gcov_fork");
5681 break;
5683 case BUILT_IN_EXECL:
5684 id = get_identifier ("__gcov_execl");
5685 break;
5687 case BUILT_IN_EXECV:
5688 id = get_identifier ("__gcov_execv");
5689 break;
5691 case BUILT_IN_EXECLP:
5692 id = get_identifier ("__gcov_execlp");
5693 break;
5695 case BUILT_IN_EXECLE:
5696 id = get_identifier ("__gcov_execle");
5697 break;
5699 case BUILT_IN_EXECVP:
5700 id = get_identifier ("__gcov_execvp");
5701 break;
5703 case BUILT_IN_EXECVE:
5704 id = get_identifier ("__gcov_execve");
5705 break;
5707 default:
5708 gcc_unreachable ();
5711 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5712 FUNCTION_DECL, id, TREE_TYPE (fn));
5713 DECL_EXTERNAL (decl) = 1;
5714 TREE_PUBLIC (decl) = 1;
5715 DECL_ARTIFICIAL (decl) = 1;
5716 TREE_NOTHROW (decl) = 1;
5717 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5718 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5719 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5720 return expand_call (call, target, ignore);
5725 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5726 the pointer in these functions is void*, the tree optimizers may remove
5727 casts. The mode computed in expand_builtin isn't reliable either, due
5728 to __sync_bool_compare_and_swap.
5730 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5731 group of builtins. This gives us log2 of the mode size. */
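/* Example: for __sync_fetch_and_add_4 the code is FOO_1 + 2, so
   FCODE_DIFF is 2 and the mode has BITS_PER_UNIT << 2 == 32 bits
   (SImode on typical byte-addressed targets).  */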
5733 static inline machine_mode
5734 get_builtin_sync_mode (int fcode_diff)
5736 /* The size is not negotiable, so ask not to get BLKmode in return
5737 if the target indicates that a smaller size would be better. */
5738 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5741 /* Expand the memory expression LOC and return the appropriate memory operand
5742 for the builtin_sync operations. */
5744 static rtx
5745 get_builtin_sync_mem (tree loc, machine_mode mode)
5747 rtx addr, mem;
5749 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5750 addr = convert_memory_address (Pmode, addr);
5752 /* Note that we explicitly do not want any alias information for this
5753 memory, so that we kill all other live memories. Otherwise we don't
5754 satisfy the full barrier semantics of the intrinsic. */
5755 mem = validize_mem (gen_rtx_MEM (mode, addr));
5757 /* The alignment needs to be at least that of the mode. */
5758 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5759 get_pointer_alignment (loc)));
5760 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5761 MEM_VOLATILE_P (mem) = 1;
5763 return mem;
5766 /* Make sure an argument is in the right mode.
5767 EXP is the tree argument.
5768 MODE is the mode it should be in. */
5770 static rtx
5771 expand_expr_force_mode (tree exp, machine_mode mode)
5773 rtx val;
5774 machine_mode old_mode;
5776 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5777 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5778 of CONST_INTs, where we know the old_mode only from the call argument. */
5780 old_mode = GET_MODE (val);
5781 if (old_mode == VOIDmode)
5782 old_mode = TYPE_MODE (TREE_TYPE (exp));
5783 val = convert_modes (mode, old_mode, val, 1);
5784 return val;
5788 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5789 EXP is the CALL_EXPR. CODE is the rtx code
5790 that corresponds to the arithmetic or logical operation from the name;
5791 an exception here is that NOT actually means NAND. TARGET is an optional
5792 place for us to store the results; AFTER is true if this is the
5793 fetch_and_xxx form. */
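/* For reference, the semantics being expanded, sketched in source form
   with __sync_fetch_and_add (so AFTER is false); the whole body is one
   atomic step:

     int __sync_fetch_and_add (int *p, int v)
     { int old = *p; *p = old + v; return old; }

   For NAND (CODE == NOT) the stored value is ~(old & v).  */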
5795 static rtx
5796 expand_builtin_sync_operation (machine_mode mode, tree exp,
5797 enum rtx_code code, bool after,
5798 rtx target)
5800 rtx val, mem;
5801 location_t loc = EXPR_LOCATION (exp);
5803 if (code == NOT && warn_sync_nand)
5805 tree fndecl = get_callee_fndecl (exp);
5806 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5808 static bool warned_f_a_n, warned_n_a_f;
5810 switch (fcode)
5812 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5813 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5814 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5815 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5816 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5817 if (warned_f_a_n)
5818 break;
5820 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5821 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5822 warned_f_a_n = true;
5823 break;
5825 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5826 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5827 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5828 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5829 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5830 if (warned_n_a_f)
5831 break;
5833 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5834 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5835 warned_n_a_f = true;
5836 break;
5838 default:
5839 gcc_unreachable ();
5843 /* Expand the operands. */
5844 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5845 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5847 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5848 after);
5851 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5852 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5853 true if this is the boolean form. TARGET is a place for us to store the
5854 results; this is NOT optional if IS_BOOL is true. */
5856 static rtx
5857 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5858 bool is_bool, rtx target)
5860 rtx old_val, new_val, mem;
5861 rtx *pbool, *poval;
5863 /* Expand the operands. */
5864 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5865 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5866 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5868 pbool = poval = NULL;
5869 if (target != const0_rtx)
5871 if (is_bool)
5872 pbool = &target;
5873 else
5874 poval = &target;
5876 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5877 false, MEMMODEL_SYNC_SEQ_CST,
5878 MEMMODEL_SYNC_SEQ_CST))
5879 return NULL_RTX;
5881 return target;
5884 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5885 general form is actually an atomic exchange, and some targets only
5886 support a reduced form with the second argument being a constant 1.
5887 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5888 the results. */
5890 static rtx
5891 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5892 rtx target)
5894 rtx val, mem;
5896 /* Expand the operands. */
5897 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5898 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5900 return expand_sync_lock_test_and_set (target, mem, val);
5903 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5905 static void
5906 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5908 rtx mem;
5910 /* Expand the operands. */
5911 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5913 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5916 /* Given an integer representing an ``enum memmodel'', verify its
5917 correctness and return the memory model enum. */
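/* The argument normally comes from the __ATOMIC_* macros, e.g.

     __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   where __ATOMIC_RELAXED .. __ATOMIC_SEQ_CST correspond to the memmodel
   values validated below.  */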
5919 static enum memmodel
5920 get_memmodel (tree exp)
5922 rtx op;
5923 unsigned HOST_WIDE_INT val;
5924 source_location loc
5925 = expansion_point_location_if_in_system_header (input_location);
5927 /* If the parameter is not a constant, it's a run time value so we'll just
5928 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5929 if (TREE_CODE (exp) != INTEGER_CST)
5930 return MEMMODEL_SEQ_CST;
5932 op = expand_normal (exp);
5934 val = INTVAL (op);
5935 if (targetm.memmodel_check)
5936 val = targetm.memmodel_check (val);
5937 else if (val & ~MEMMODEL_MASK)
5939 warning_at (loc, OPT_Winvalid_memory_model,
5940 "unknown architecture specifier in memory model to builtin");
5941 return MEMMODEL_SEQ_CST;
5944 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5945 if (memmodel_base (val) >= MEMMODEL_LAST)
5947 warning_at (loc, OPT_Winvalid_memory_model,
5948 "invalid memory model argument to builtin");
5949 return MEMMODEL_SEQ_CST;
5952 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5953 be conservative and promote consume to acquire. */
5954 if (val == MEMMODEL_CONSUME)
5955 val = MEMMODEL_ACQUIRE;
5957 return (enum memmodel) val;
5960 /* Expand the __atomic_exchange intrinsic:
5961 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5962 EXP is the CALL_EXPR.
5963 TARGET is an optional place for us to store the results. */
5965 static rtx
5966 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5968 rtx val, mem;
5969 enum memmodel model;
5971 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5973 if (!flag_inline_atomics)
5974 return NULL_RTX;
5976 /* Expand the operands. */
5977 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5978 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5980 return expand_atomic_exchange (target, mem, val, model);
5983 /* Expand the __atomic_compare_exchange intrinsic:
5984 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5985 TYPE desired, BOOL weak,
5986 enum memmodel success,
5987 enum memmodel failure)
5988 EXP is the CALL_EXPR.
5989 TARGET is an optional place for us to store the results. */
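/* Typical source form feeding this expander (x and expected are
   hypothetical ints):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&x, &expected, 1,
                                            false,            // weak
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);
     // on failure, expected is updated with the value observed in x
*/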
5991 static rtx
5992 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5993 rtx target)
5995 rtx expect, desired, mem, oldval;
5996 rtx_code_label *label;
5997 enum memmodel success, failure;
5998 tree weak;
5999 bool is_weak;
6000 source_location loc
6001 = expansion_point_location_if_in_system_header (input_location);
6003 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6004 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6006 if (failure > success)
6008 warning_at (loc, OPT_Winvalid_memory_model,
6009 "failure memory model cannot be stronger than success "
6010 "memory model for %<__atomic_compare_exchange%>");
6011 success = MEMMODEL_SEQ_CST;
6014 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6016 warning_at (loc, OPT_Winvalid_memory_model,
6017 "invalid failure memory model for "
6018 "%<__atomic_compare_exchange%>");
6019 failure = MEMMODEL_SEQ_CST;
6020 success = MEMMODEL_SEQ_CST;
6024 if (!flag_inline_atomics)
6025 return NULL_RTX;
6027 /* Expand the operands. */
6028 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6030 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6031 expect = convert_memory_address (Pmode, expect);
6032 expect = gen_rtx_MEM (mode, expect);
6033 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6035 weak = CALL_EXPR_ARG (exp, 3);
6036 is_weak = false;
6037 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6038 is_weak = true;
6040 if (target == const0_rtx)
6041 target = NULL;
6043 /* Lest the rtl backend create a race condition with an improper store
6044 to memory, always create a new pseudo for OLDVAL. */
6045 oldval = NULL;
6047 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6048 is_weak, success, failure))
6049 return NULL_RTX;
6051 /* Conditionally store back to EXPECT, lest we create a race condition
6052 with an improper store to memory. */
6053 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6054 the normal case where EXPECT is totally private, i.e. a register. At
6055 which point the store can be unconditional. */
6056 label = gen_label_rtx ();
6057 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6058 GET_MODE (target), 1, label);
6059 emit_move_insn (expect, oldval);
6060 emit_label (label);
6062 return target;
6065 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6066 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6067 call. The weak parameter must be dropped to match the expected parameter
6068 list and the expected argument changed from value to pointer to memory
6069 slot. */
6071 static void
6072 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6074 unsigned int z;
6075 vec<tree, va_gc> *vec;
6077 vec_alloc (vec, 5);
6078 vec->quick_push (gimple_call_arg (call, 0));
6079 tree expected = gimple_call_arg (call, 1);
6080 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6081 TREE_TYPE (expected));
6082 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6083 if (expd != x)
6084 emit_move_insn (x, expd);
6085 tree v = make_tree (TREE_TYPE (expected), x);
6086 vec->quick_push (build1 (ADDR_EXPR,
6087 build_pointer_type (TREE_TYPE (expected)), v));
6088 vec->quick_push (gimple_call_arg (call, 2));
6089 /* Skip the boolean weak parameter. */
6090 for (z = 4; z < 6; z++)
6091 vec->quick_push (gimple_call_arg (call, z));
6092 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6093 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6094 gcc_assert (bytes_log2 < 5);
6095 built_in_function fncode
6096 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6097 + bytes_log2);
6098 tree fndecl = builtin_decl_explicit (fncode);
6099 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6100 fndecl);
6101 tree exp = build_call_vec (boolean_type_node, fn, vec);
6102 tree lhs = gimple_call_lhs (call);
6103 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6104 if (lhs)
6106 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6107 if (GET_MODE (boolret) != mode)
6108 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6109 x = force_reg (mode, x);
6110 write_complex_part (target, boolret, true);
6111 write_complex_part (target, x, false);
6115 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6117 void
6118 expand_ifn_atomic_compare_exchange (gcall *call)
6120 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6121 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6122 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6123 rtx expect, desired, mem, oldval, boolret;
6124 enum memmodel success, failure;
6125 tree lhs;
6126 bool is_weak;
6127 source_location loc
6128 = expansion_point_location_if_in_system_header (gimple_location (call));
6130 success = get_memmodel (gimple_call_arg (call, 4));
6131 failure = get_memmodel (gimple_call_arg (call, 5));
6133 if (failure > success)
6135 warning_at (loc, OPT_Winvalid_memory_model,
6136 "failure memory model cannot be stronger than success "
6137 "memory model for %<__atomic_compare_exchange%>");
6138 success = MEMMODEL_SEQ_CST;
6141 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6143 warning_at (loc, OPT_Winvalid_memory_model,
6144 "invalid failure memory model for "
6145 "%<__atomic_compare_exchange%>");
6146 failure = MEMMODEL_SEQ_CST;
6147 success = MEMMODEL_SEQ_CST;
6150 if (!flag_inline_atomics)
6152 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6153 return;
6156 /* Expand the operands. */
6157 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6159 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6160 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6162 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6164 boolret = NULL;
6165 oldval = NULL;
6167 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6168 is_weak, success, failure))
6170 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6171 return;
6174 lhs = gimple_call_lhs (call);
6175 if (lhs)
6177 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6178 if (GET_MODE (boolret) != mode)
6179 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6180 write_complex_part (target, boolret, true);
6181 write_complex_part (target, oldval, false);
6185 /* Expand the __atomic_load intrinsic:
6186 TYPE __atomic_load (TYPE *object, enum memmodel)
6187 EXP is the CALL_EXPR.
6188 TARGET is an optional place for us to store the results. */
6190 static rtx
6191 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6193 rtx mem;
6194 enum memmodel model;
6196 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6197 if (is_mm_release (model) || is_mm_acq_rel (model))
6199 source_location loc
6200 = expansion_point_location_if_in_system_header (input_location);
6201 warning_at (loc, OPT_Winvalid_memory_model,
6202 "invalid memory model for %<__atomic_load%>");
6203 model = MEMMODEL_SEQ_CST;
6206 if (!flag_inline_atomics)
6207 return NULL_RTX;
6209 /* Expand the operand. */
6210 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6212 return expand_atomic_load (target, mem, model);
6216 /* Expand the __atomic_store intrinsic:
6217 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6218 EXP is the CALL_EXPR.
6219 TARGET is an optional place for us to store the results. */
6221 static rtx
6222 expand_builtin_atomic_store (machine_mode mode, tree exp)
6224 rtx mem, val;
6225 enum memmodel model;
6227 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6228 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6229 || is_mm_release (model)))
6231 source_location loc
6232 = expansion_point_location_if_in_system_header (input_location);
6233 warning_at (loc, OPT_Winvalid_memory_model,
6234 "invalid memory model for %<__atomic_store%>");
6235 model = MEMMODEL_SEQ_CST;
6238 if (!flag_inline_atomics)
6239 return NULL_RTX;
6241 /* Expand the operands. */
6242 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6243 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6245 return expand_atomic_store (mem, val, model, false);
6248 /* Expand the __atomic_fetch_XXX intrinsic:
6249 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6250 EXP is the CALL_EXPR.
6251 TARGET is an optional place for us to store the results.
6252 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6253 FETCH_AFTER is true if returning the result of the operation.
6254 FETCH_AFTER is false if returning the value before the operation.
6255 IGNORE is true if the result is not used.
6256 EXT_CALL is the correct builtin for an external call if this cannot be
6257 resolved to an instruction sequence. */
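/* When only the fetch-before value is available (e.g. from the library
   call) but the fetch-after result is wanted, the correction emitted at
   the end of this function computes, sketched for __atomic_add_fetch:

     ret = before + val;                       // code == PLUS
     // and for NAND: ret = ~(before & val);      (code == NOT)
*/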
6259 static rtx
6260 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6261 enum rtx_code code, bool fetch_after,
6262 bool ignore, enum built_in_function ext_call)
6264 rtx val, mem, ret;
6265 enum memmodel model;
6266 tree fndecl;
6267 tree addr;
6269 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6275 /* Only try generating instructions if inlining is turned on. */
6276 if (flag_inline_atomics)
6278 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6279 if (ret)
6280 return ret;
6283 /* Return if a different routine isn't needed for the library call. */
6284 if (ext_call == BUILT_IN_NONE)
6285 return NULL_RTX;
6287 /* Change the call to the specified function. */
6288 fndecl = get_callee_fndecl (exp);
6289 addr = CALL_EXPR_FN (exp);
6290 STRIP_NOPS (addr);
6292 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6293 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6295 /* If we will emit code after the call, the call cannot be a tail call.
6296 If it is emitted as a tail call, a barrier is emitted after it, and
6297 then all trailing code is removed. */
6298 if (!ignore)
6299 CALL_EXPR_TAILCALL (exp) = 0;
6301 /* Expand the call here so we can emit trailing code. */
6302 ret = expand_call (exp, target, ignore);
6304 /* Replace the original function just in case it matters. */
6305 TREE_OPERAND (addr, 0) = fndecl;
6307 /* Then issue the arithmetic correction to return the right result. */
6308 if (!ignore)
6310 if (code == NOT)
6312 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6313 OPTAB_LIB_WIDEN);
6314 ret = expand_simple_unop (mode, NOT, ret, target, true);
6316 else
6317 ret = expand_simple_binop (mode, code, ret, val, target, true,
6318 OPTAB_LIB_WIDEN);
6320 return ret;
6323 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6325 void
6326 expand_ifn_atomic_bit_test_and (gcall *call)
6328 tree ptr = gimple_call_arg (call, 0);
6329 tree bit = gimple_call_arg (call, 1);
6330 tree flag = gimple_call_arg (call, 2);
6331 tree lhs = gimple_call_lhs (call);
6332 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6333 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6334 enum rtx_code code;
6335 optab optab;
6336 struct expand_operand ops[5];
6338 gcc_assert (flag_inline_atomics);
6340 if (gimple_call_num_args (call) == 4)
6341 model = get_memmodel (gimple_call_arg (call, 3));
6343 rtx mem = get_builtin_sync_mem (ptr, mode);
6344 rtx val = expand_expr_force_mode (bit, mode);
6346 switch (gimple_call_internal_fn (call))
6348 case IFN_ATOMIC_BIT_TEST_AND_SET:
6349 code = IOR;
6350 optab = atomic_bit_test_and_set_optab;
6351 break;
6352 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6353 code = XOR;
6354 optab = atomic_bit_test_and_complement_optab;
6355 break;
6356 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6357 code = AND;
6358 optab = atomic_bit_test_and_reset_optab;
6359 break;
6360 default:
6361 gcc_unreachable ();
6364 if (lhs == NULL_TREE)
6366 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6367 val, NULL_RTX, true, OPTAB_DIRECT);
6368 if (code == AND)
6369 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6370 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6371 return;
6374 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6375 enum insn_code icode = direct_optab_handler (optab, mode);
6376 gcc_assert (icode != CODE_FOR_nothing);
6377 create_output_operand (&ops[0], target, mode);
6378 create_fixed_operand (&ops[1], mem);
6379 create_convert_operand_to (&ops[2], val, mode, true);
6380 create_integer_operand (&ops[3], model);
6381 create_integer_operand (&ops[4], integer_onep (flag));
6382 if (maybe_expand_insn (icode, 5, ops))
6383 return;
6385 rtx bitval = val;
6386 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6387 val, NULL_RTX, true, OPTAB_DIRECT);
6388 rtx maskval = val;
6389 if (code == AND)
6390 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6391 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6392 code, model, false);
6393 if (integer_onep (flag))
6395 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6396 NULL_RTX, true, OPTAB_DIRECT);
6397 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6398 true, OPTAB_DIRECT);
6400 else
6401 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6402 OPTAB_DIRECT);
6403 if (result != target)
6404 emit_move_insn (target, result);
6407 /* Expand an atomic clear operation.
6408 void _atomic_clear (BOOL *obj, enum memmodel)
6409 EXP is the call expression. */
6411 static rtx
6412 expand_builtin_atomic_clear (tree exp)
6414 machine_mode mode;
6415 rtx mem, ret;
6416 enum memmodel model;
6418 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6419 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6420 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6422 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6424 source_location loc
6425 = expansion_point_location_if_in_system_header (input_location);
6426 warning_at (loc, OPT_Winvalid_memory_model,
6427 "invalid memory model for %<__atomic_store%>");
6428 model = MEMMODEL_SEQ_CST;
6431 /* Try issuing an __atomic_store, allowing a fallback to the
6432 __sync_lock_release pattern. The only way this can fail is if the bool
6433 type is larger than a word size. Unlikely, but handle it anyway for
6434 completeness. In that case assume a single threaded model, since there
6435 is no atomic support and no barriers are required. */
6436 ret = expand_atomic_store (mem, const0_rtx, model, true);
6437 if (!ret)
6438 emit_move_insn (mem, const0_rtx);
6439 return const0_rtx;
6442 /* Expand an atomic test_and_set operation.
6443 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6444 EXP is the call expression. */
6446 static rtx
6447 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6449 rtx mem;
6450 enum memmodel model;
6451 machine_mode mode;
6453 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6454 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6457 return expand_atomic_test_and_set (target, mem, model);
6461 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6462 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
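/* Example folds, assuming a target with a 32-bit compare-and-swap
   pattern and atomic load:

     __atomic_always_lock_free (4, 0)   // folds to true
     __atomic_always_lock_free (3, 0)   // folds to false: no 24-bit mode
*/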
6464 static tree
6465 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6467 int size;
6468 machine_mode mode;
6469 unsigned int mode_align, type_align;
6471 if (TREE_CODE (arg0) != INTEGER_CST)
6472 return NULL_TREE;
6474 /* We need a corresponding integer mode for the access to be lock-free. */
6475 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6476 if (!int_mode_for_size (size, 0).exists (&mode))
6477 return boolean_false_node;
6479 mode_align = GET_MODE_ALIGNMENT (mode);
6481 if (TREE_CODE (arg1) == INTEGER_CST)
6483 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6485 /* Either this argument is null, or it's a fake pointer encoding
6486 the alignment of the object. */
6487 val = least_bit_hwi (val);
6488 val *= BITS_PER_UNIT;
6490 if (val == 0 || mode_align < val)
6491 type_align = mode_align;
6492 else
6493 type_align = val;
6495 else
6497 tree ttype = TREE_TYPE (arg1);
6499 /* This function is usually invoked and folded immediately by the front
6500 end before anything else has a chance to look at it. The pointer
6501 parameter at this point is usually cast to a void *, so check for that
6502 and look past the cast. */
6503 if (CONVERT_EXPR_P (arg1)
6504 && POINTER_TYPE_P (ttype)
6505 && VOID_TYPE_P (TREE_TYPE (ttype))
6506 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6507 arg1 = TREE_OPERAND (arg1, 0);
6509 ttype = TREE_TYPE (arg1);
6510 gcc_assert (POINTER_TYPE_P (ttype));
6512 /* Get the underlying type of the object. */
6513 ttype = TREE_TYPE (ttype);
6514 type_align = TYPE_ALIGN (ttype);
6517 /* If the object has smaller alignment, the lock free routines cannot
6518 be used. */
6519 if (type_align < mode_align)
6520 return boolean_false_node;
6522 /* Check if a compare_and_swap pattern exists for the mode which represents
6523 the required size. The pattern is not allowed to fail, so the existence
6524 of the pattern indicates support is present. Also require that an
6525 atomic load exists for the required size. */
6526 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6527 return boolean_true_node;
6528 else
6529 return boolean_false_node;
6532 /* Return true if the parameters to call EXP represent an object which will
6533 always generate lock free instructions. The first argument represents the
6534 size of the object, and the second parameter is a pointer to the object
6535 itself. If NULL is passed for the object, then the result is based on
6536 typical alignment for an object of the specified size. Otherwise return
6537 false. */
6539 static rtx
6540 expand_builtin_atomic_always_lock_free (tree exp)
6542 tree size;
6543 tree arg0 = CALL_EXPR_ARG (exp, 0);
6544 tree arg1 = CALL_EXPR_ARG (exp, 1);
6546 if (TREE_CODE (arg0) != INTEGER_CST)
6548 error ("non-constant argument 1 to __atomic_always_lock_free");
6549 return const0_rtx;
6552 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6553 if (size == boolean_true_node)
6554 return const1_rtx;
6555 return const0_rtx;
6558 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6559 is lock free on this architecture. */
6561 static tree
6562 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6564 if (!flag_inline_atomics)
6565 return NULL_TREE;
6567 /* If it isn't always lock free, don't generate a result. */
6568 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6569 return boolean_true_node;
6571 return NULL_TREE;
6574 /* Return true if the parameters to call EXP represent an object which will
6575 always generate lock free instructions. The first argument represents the
6576 size of the object, and the second parameter is a pointer to the object
6577 itself. If NULL is passed for the object, then the result is based on
6578 typical alignment for an object of the specified size. Otherwise return
6579 NULL. */
6581 static rtx
6582 expand_builtin_atomic_is_lock_free (tree exp)
6584 tree size;
6585 tree arg0 = CALL_EXPR_ARG (exp, 0);
6586 tree arg1 = CALL_EXPR_ARG (exp, 1);
6588 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6590 error ("non-integer argument 1 to __atomic_is_lock_free");
6591 return NULL_RTX;
6594 if (!flag_inline_atomics)
6595 return NULL_RTX;
6597 /* If the value is known at compile time, return the RTX for it. */
6598 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6599 if (size == boolean_true_node)
6600 return const1_rtx;
6602 return NULL_RTX;
6605 /* Expand the __atomic_thread_fence intrinsic:
6606 void __atomic_thread_fence (enum memmodel)
6607 EXP is the CALL_EXPR. */
6609 static void
6610 expand_builtin_atomic_thread_fence (tree exp)
6612 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6613 expand_mem_thread_fence (model);
6616 /* Expand the __atomic_signal_fence intrinsic:
6617 void __atomic_signal_fence (enum memmodel)
6618 EXP is the CALL_EXPR. */
6620 static void
6621 expand_builtin_atomic_signal_fence (tree exp)
6623 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6624 expand_mem_signal_fence (model);
6627 /* Expand the __sync_synchronize intrinsic. */
6629 static void
6630 expand_builtin_sync_synchronize (void)
6632 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6635 static rtx
6636 expand_builtin_thread_pointer (tree exp, rtx target)
6638 enum insn_code icode;
6639 if (!validate_arglist (exp, VOID_TYPE))
6640 return const0_rtx;
6641 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6642 if (icode != CODE_FOR_nothing)
6644 struct expand_operand op;
6645 /* If the target is not suitable then create a new target. */
6646 if (target == NULL_RTX
6647 || !REG_P (target)
6648 || GET_MODE (target) != Pmode)
6649 target = gen_reg_rtx (Pmode);
6650 create_output_operand (&op, target, Pmode);
6651 expand_insn (icode, 1, &op);
6652 return target;
6654 error ("__builtin_thread_pointer is not supported on this target");
6655 return const0_rtx;
6658 static void
6659 expand_builtin_set_thread_pointer (tree exp)
6661 enum insn_code icode;
6662 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6663 return;
6664 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6665 if (icode != CODE_FOR_nothing)
6667 struct expand_operand op;
6668 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6669 Pmode, EXPAND_NORMAL);
6670 create_input_operand (&op, val, Pmode);
6671 expand_insn (icode, 1, &op);
6672 return;
6674 error ("__builtin_set_thread_pointer is not supported on this target");
6678 /* Emit code to restore the current value of stack. */
6680 static void
6681 expand_stack_restore (tree var)
6683 rtx_insn *prev;
6684 rtx sa = expand_normal (var);
6686 sa = convert_memory_address (Pmode, sa);
6688 prev = get_last_insn ();
6689 emit_stack_restore (SAVE_BLOCK, sa);
6691 record_new_stack_level ();
6693 fixup_args_size_notes (prev, get_last_insn (), 0);
6696 /* Emit code to save the current value of stack. */
6698 static rtx
6699 expand_stack_save (void)
6701 rtx ret = NULL_RTX;
6703 emit_stack_save (SAVE_BLOCK, &ret);
6704 return ret;
6707 /* Emit code to get the openacc gang, worker or vector id or size. */
6709 static rtx
6710 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6712 const char *name;
6713 rtx fallback_retval;
6714 rtx_insn *(*gen_fn) (rtx, rtx);
6715 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6717 case BUILT_IN_GOACC_PARLEVEL_ID:
6718 name = "__builtin_goacc_parlevel_id";
6719 fallback_retval = const0_rtx;
6720 gen_fn = targetm.gen_oacc_dim_pos;
6721 break;
6722 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6723 name = "__builtin_goacc_parlevel_size";
6724 fallback_retval = const1_rtx;
6725 gen_fn = targetm.gen_oacc_dim_size;
6726 break;
6727 default:
6728 gcc_unreachable ();
6731 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6733 error ("%qs only supported in OpenACC code", name);
6734 return const0_rtx;
6737 tree arg = CALL_EXPR_ARG (exp, 0);
6738 if (TREE_CODE (arg) != INTEGER_CST)
6740 error ("non-constant argument 0 to %qs", name);
6741 return const0_rtx;
6744 int dim = TREE_INT_CST_LOW (arg);
6745 switch (dim)
6747 case GOMP_DIM_GANG:
6748 case GOMP_DIM_WORKER:
6749 case GOMP_DIM_VECTOR:
6750 break;
6751 default:
6752 error ("illegal argument 0 to %qs", name);
6753 return const0_rtx;
6756 if (ignore)
6757 return target;
6759 if (target == NULL_RTX)
6760 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6762 if (!targetm.have_oacc_dim_size ())
6764 emit_move_insn (target, fallback_retval);
6765 return target;
6768 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6769 emit_insn (gen_fn (reg, GEN_INT (dim)));
6770 if (reg != target)
6771 emit_move_insn (target, reg);
6773 return target;
6776 /* Expand a string compare operation using a sequence of char comparisons
6777 to get rid of the calling overhead, with the result going to TARGET if
6778 that's convenient.
6780 VAR_STR is the variable string source;
6781 CONST_STR is the constant string source;
6782 LENGTH is the number of chars to compare;
6783 CONST_STR_N indicates which source string is the constant string;
6784 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6786 This expands to (assuming const_str_n is 2, i.e., arg2 is a constant string):
6788 target = (int) (unsigned char) var_str[0]
6789 - (int) (unsigned char) const_str[0];
6790 if (target != 0)
6791 goto ne_label;
6792 ...
6793 target = (int) (unsigned char) var_str[length - 2]
6794 - (int) (unsigned char) const_str[length - 2];
6795 if (target != 0)
6796 goto ne_label;
6797 target = (int) (unsigned char) var_str[length - 1]
6798 - (int) (unsigned char) const_str[length - 1];
6799 ne_label:
6802 static rtx
6803 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6804 unsigned HOST_WIDE_INT length,
6805 int const_str_n, machine_mode mode)
6807 HOST_WIDE_INT offset = 0;
6808 rtx var_rtx_array
6809 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6810 rtx var_rtx = NULL_RTX;
6811 rtx const_rtx = NULL_RTX;
6812 rtx result = target ? target : gen_reg_rtx (mode);
6813 rtx_code_label *ne_label = gen_label_rtx ();
6814 tree unit_type_node = unsigned_char_type_node;
6815 scalar_int_mode unit_mode
6816 = as_a <scalar_int_mode> (TYPE_MODE (unit_type_node));
6818 start_sequence ();
6820 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6822 var_rtx
6823 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6824 const_rtx = c_readstr (const_str + offset, unit_mode);
6825 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6826 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6828 op0 = convert_modes (mode, unit_mode, op0, 1);
6829 op1 = convert_modes (mode, unit_mode, op1, 1);
6830 result = expand_simple_binop (mode, MINUS, op0, op1,
6831 result, 1, OPTAB_WIDEN);
6832 if (i < length - 1)
6833 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6834 mode, true, ne_label);
6835 offset += GET_MODE_SIZE (unit_mode);
6838 emit_label (ne_label);
6839 rtx_insn *insns = get_insns ();
6840 end_sequence ();
6841 emit_insn (insns);
6843 return result;
6846 /* Inline expansion of a call to str(n)cmp, with the result going to
6847 TARGET if that's convenient.
6848 If the call is not inlined, return NULL_RTX. */
6849 static rtx
6850 inline_expand_builtin_string_cmp (tree exp, rtx target)
6852 tree fndecl = get_callee_fndecl (exp);
6853 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6854 unsigned HOST_WIDE_INT length = 0;
6855 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6857 /* Do NOT apply this inlining expansion when optimizing for size or
6858 optimization level below 2. */
6859 if (optimize < 2 || optimize_insn_for_size_p ())
6860 return NULL_RTX;
6862 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6863 || fcode == BUILT_IN_STRNCMP
6864 || fcode == BUILT_IN_MEMCMP);
6866 /* On a target where the type of the call (int) has the same or narrower
6867 precision than unsigned char, give up on the inline expansion. */
6868 if (TYPE_PRECISION (unsigned_char_type_node)
6869 >= TYPE_PRECISION (TREE_TYPE (exp)))
6870 return NULL_RTX;
6872 tree arg1 = CALL_EXPR_ARG (exp, 0);
6873 tree arg2 = CALL_EXPR_ARG (exp, 1);
6874 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6876 unsigned HOST_WIDE_INT len1 = 0;
6877 unsigned HOST_WIDE_INT len2 = 0;
6878 unsigned HOST_WIDE_INT len3 = 0;
6880 const char *src_str1 = c_getstr (arg1, &len1);
6881 const char *src_str2 = c_getstr (arg2, &len2);
6883 /* If neither string is a constant string, the call does not qualify. */
6884 if (!src_str1 && !src_str2)
6885 return NULL_RTX;
6887 /* For strncmp, if the length is not a constant, the call does not qualify. */
6888 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6889 return NULL_RTX;
6891 int const_str_n = 0;
6892 if (!len1)
6893 const_str_n = 2;
6894 else if (!len2)
6895 const_str_n = 1;
6896 else if (len2 > len1)
6897 const_str_n = 1;
6898 else
6899 const_str_n = 2;
6901 gcc_checking_assert (const_str_n > 0);
6902 length = (const_str_n == 1) ? len1 : len2;
6904 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6905 length = len3;
6907 /* If the length of the comparison is larger than the threshold,
6908 do nothing. */
6909 if (length > (unsigned HOST_WIDE_INT)
6910 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6911 return NULL_RTX;
6913 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6915 /* Now, start inline expansion of the call. */
6916 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6917 (const_str_n == 1) ? src_str1 : src_str2, length,
6918 const_str_n, mode);
6921 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
6922 represents the size of the first argument to that call, or VOIDmode
6923 if the argument is a pointer. IGNORE will be true if the result
6924 isn't used. */
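/* For reference, a source-level sketch of the intended use (array, i
   and len are hypothetical); under incorrect speculative execution of
   the branch, the builtin yields the failsafe value instead of i:

     if (i < len)
       x = array[__builtin_speculation_safe_value (i)];
*/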
6925 static rtx
6926 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6927 bool ignore)
6929 rtx val, failsafe;
6930 unsigned nargs = call_expr_nargs (exp);
6932 tree arg0 = CALL_EXPR_ARG (exp, 0);
6934 if (mode == VOIDmode)
6936 mode = TYPE_MODE (TREE_TYPE (arg0));
6937 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6940 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6942 /* An optional second argument can be used as a failsafe value on
6943 some machines. If it isn't present, then the failsafe value is
6944 assumed to be 0. */
6945 if (nargs > 1)
6947 tree arg1 = CALL_EXPR_ARG (exp, 1);
6948 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6950 else
6951 failsafe = const0_rtx;
6953 /* If the result isn't used, the behavior is undefined. It would be
6954 nice to emit a warning here, but path splitting means this might
6955 happen with legitimate code. So simply drop the builtin
6956 expansion in that case; we've handled any side-effects above. */
6957 if (ignore)
6958 return const0_rtx;
6960 /* If we don't have a suitable target, create one to hold the result. */
6961 if (target == NULL || GET_MODE (target) != mode)
6962 target = gen_reg_rtx (mode);
6964 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6965 val = convert_modes (mode, VOIDmode, val, false);
6967 return targetm.speculation_safe_value (mode, target, val, failsafe);
6970 /* Expand an expression EXP that calls a built-in function,
6971 with result going to TARGET if that's convenient
6972 (and in mode MODE if that's convenient).
6973 SUBTARGET may be used as the target for computing one of EXP's operands.
6974 IGNORE is nonzero if the value is to be ignored. */
6976 rtx
6977 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6978 int ignore)
6980 tree fndecl = get_callee_fndecl (exp);
6981 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6982 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6983 int flags;
6985 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6986 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6988 /* When ASan is enabled, we don't want to expand some memory/string
6989 builtins and rely on libsanitizer's hooks. This allows us to avoid
6990 redundant checks and be sure, that possible overflow will be detected
6991 by ASan. */
6993 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6994 return expand_call (exp, target, ignore);
6996 /* When not optimizing, generate calls to library functions for a certain
6997 set of builtins. */
6998 if (!optimize
6999 && !called_as_built_in (fndecl)
7000 && fcode != BUILT_IN_FORK
7001 && fcode != BUILT_IN_EXECL
7002 && fcode != BUILT_IN_EXECV
7003 && fcode != BUILT_IN_EXECLP
7004 && fcode != BUILT_IN_EXECLE
7005 && fcode != BUILT_IN_EXECVP
7006 && fcode != BUILT_IN_EXECVE
7007 && !ALLOCA_FUNCTION_CODE_P (fcode)
7008 && fcode != BUILT_IN_FREE)
7009 return expand_call (exp, target, ignore);
7011 /* The built-in function expanders test for target == const0_rtx
7012 to determine whether the function's result will be ignored. */
7013 if (ignore)
7014 target = const0_rtx;
7016 /* If the result of a pure or const built-in function is ignored, and
7017 none of its arguments are volatile, we can avoid expanding the
7018 built-in call and just evaluate the arguments for side-effects. */
7019 if (target == const0_rtx
7020 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7021 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7023 bool volatilep = false;
7024 tree arg;
7025 call_expr_arg_iterator iter;
7027 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7028 if (TREE_THIS_VOLATILE (arg))
7030 volatilep = true;
7031 break;
7034 if (! volatilep)
7036 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7037 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7038 return const0_rtx;
7042 switch (fcode)
7044 CASE_FLT_FN (BUILT_IN_FABS):
7045 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7046 case BUILT_IN_FABSD32:
7047 case BUILT_IN_FABSD64:
7048 case BUILT_IN_FABSD128:
7049 target = expand_builtin_fabs (exp, target, subtarget);
7050 if (target)
7051 return target;
7052 break;
7054 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7055 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7056 target = expand_builtin_copysign (exp, target, subtarget);
7057 if (target)
7058 return target;
7059 break;
7061 /* Just do a normal library call if we were unable to fold
7062 the values. */
7063 CASE_FLT_FN (BUILT_IN_CABS):
7064 break;
7066 CASE_FLT_FN (BUILT_IN_FMA):
7067 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7068 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7069 if (target)
7070 return target;
7071 break;
7073 CASE_FLT_FN (BUILT_IN_ILOGB):
7074 if (! flag_unsafe_math_optimizations)
7075 break;
7076 gcc_fallthrough ();
7077 CASE_FLT_FN (BUILT_IN_ISINF):
7078 CASE_FLT_FN (BUILT_IN_FINITE):
7079 case BUILT_IN_ISFINITE:
7080 case BUILT_IN_ISNORMAL:
7081 target = expand_builtin_interclass_mathfn (exp, target);
7082 if (target)
7083 return target;
7084 break;
7086 CASE_FLT_FN (BUILT_IN_ICEIL):
7087 CASE_FLT_FN (BUILT_IN_LCEIL):
7088 CASE_FLT_FN (BUILT_IN_LLCEIL):
7089 CASE_FLT_FN (BUILT_IN_LFLOOR):
7090 CASE_FLT_FN (BUILT_IN_IFLOOR):
7091 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7092 target = expand_builtin_int_roundingfn (exp, target);
7093 if (target)
7094 return target;
7095 break;
7097 CASE_FLT_FN (BUILT_IN_IRINT):
7098 CASE_FLT_FN (BUILT_IN_LRINT):
7099 CASE_FLT_FN (BUILT_IN_LLRINT):
7100 CASE_FLT_FN (BUILT_IN_IROUND):
7101 CASE_FLT_FN (BUILT_IN_LROUND):
7102 CASE_FLT_FN (BUILT_IN_LLROUND):
7103 target = expand_builtin_int_roundingfn_2 (exp, target);
7104 if (target)
7105 return target;
7106 break;
7108 CASE_FLT_FN (BUILT_IN_POWI):
7109 target = expand_builtin_powi (exp, target);
7110 if (target)
7111 return target;
7112 break;
7114 CASE_FLT_FN (BUILT_IN_CEXPI):
7115 target = expand_builtin_cexpi (exp, target);
7116 gcc_assert (target);
7117 return target;
7119 CASE_FLT_FN (BUILT_IN_SIN):
7120 CASE_FLT_FN (BUILT_IN_COS):
7121 if (! flag_unsafe_math_optimizations)
7122 break;
7123 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7124 if (target)
7125 return target;
7126 break;
7128 CASE_FLT_FN (BUILT_IN_SINCOS):
7129 if (! flag_unsafe_math_optimizations)
7130 break;
7131 target = expand_builtin_sincos (exp);
7132 if (target)
7133 return target;
7134 break;
7136 case BUILT_IN_APPLY_ARGS:
7137 return expand_builtin_apply_args ();
7139 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7140 FUNCTION with a copy of the parameters described by
7141 ARGUMENTS, and ARGSIZE. It returns a block of memory
7142 allocated on the stack into which is stored all the registers
7143 that might possibly be used for returning the result of a
7144 function. ARGUMENTS is the value returned by
7145 __builtin_apply_args. ARGSIZE is the number of bytes of
7146 arguments that must be copied. ??? How should this value be
7147 computed? We'll also need a safe worst case value for varargs
7148 functions. */
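/* A minimal usage sketch of the apply/return machinery: a
   hypothetical wrapper that forwards its arguments to TARGET_FN and
   returns whatever TARGET_FN returned.  TARGET_FN and the worst-case
   argument size of 64 bytes are illustrative assumptions only.

     int wrapper ()
     {
       void *args = __builtin_apply_args ();
       void *result
         = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }
*/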
7149 case BUILT_IN_APPLY:
7150 if (!validate_arglist (exp, POINTER_TYPE,
7151 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7152 && !validate_arglist (exp, REFERENCE_TYPE,
7153 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7154 return const0_rtx;
7155 else
7157 rtx ops[3];
7159 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7160 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7161 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7163 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7166 /* __builtin_return (RESULT) causes the function to return the
7167 value described by RESULT. RESULT is address of the block of
7168 memory returned by __builtin_apply. */
7169 case BUILT_IN_RETURN:
7170 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7171 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7172 return const0_rtx;
7174 case BUILT_IN_SAVEREGS:
7175 return expand_builtin_saveregs ();
7177 case BUILT_IN_VA_ARG_PACK:
7178 /* All valid uses of __builtin_va_arg_pack () are removed during
7179 inlining. */
7180 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7181 return const0_rtx;
7183 case BUILT_IN_VA_ARG_PACK_LEN:
7184 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7185 inlining. */
7186 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7187 return const0_rtx;
7189 /* Return the address of the first anonymous stack arg. */
7190 case BUILT_IN_NEXT_ARG:
7191 if (fold_builtin_next_arg (exp, false))
7192 return const0_rtx;
7193 return expand_builtin_next_arg ();
7195 case BUILT_IN_CLEAR_CACHE:
7196 target = expand_builtin___clear_cache (exp);
7197 if (target)
7198 return target;
7199 break;
7201 case BUILT_IN_CLASSIFY_TYPE:
7202 return expand_builtin_classify_type (exp);
7204 case BUILT_IN_CONSTANT_P:
7205 return const0_rtx;
7207 case BUILT_IN_FRAME_ADDRESS:
7208 case BUILT_IN_RETURN_ADDRESS:
7209 return expand_builtin_frame_address (fndecl, exp);
7211 /* Return the address of the area where the structure is returned,
7212 or 0 otherwise. */
7213 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7214 if (call_expr_nargs (exp) != 0
7215 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7216 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7217 return const0_rtx;
7218 else
7219 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7221 CASE_BUILT_IN_ALLOCA:
7222 target = expand_builtin_alloca (exp);
7223 if (target)
7224 return target;
7225 break;
7227 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7228 return expand_asan_emit_allocas_unpoison (exp);
7230 case BUILT_IN_STACK_SAVE:
7231 return expand_stack_save ();
7233 case BUILT_IN_STACK_RESTORE:
7234 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7235 return const0_rtx;
7237 case BUILT_IN_BSWAP16:
7238 case BUILT_IN_BSWAP32:
7239 case BUILT_IN_BSWAP64:
7240 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7241 if (target)
7242 return target;
7243 break;
7245 CASE_INT_FN (BUILT_IN_FFS):
7246 target = expand_builtin_unop (target_mode, exp, target,
7247 subtarget, ffs_optab);
7248 if (target)
7249 return target;
7250 break;
7252 CASE_INT_FN (BUILT_IN_CLZ):
7253 target = expand_builtin_unop (target_mode, exp, target,
7254 subtarget, clz_optab);
7255 if (target)
7256 return target;
7257 break;
7259 CASE_INT_FN (BUILT_IN_CTZ):
7260 target = expand_builtin_unop (target_mode, exp, target,
7261 subtarget, ctz_optab);
7262 if (target)
7263 return target;
7264 break;
7266 CASE_INT_FN (BUILT_IN_CLRSB):
7267 target = expand_builtin_unop (target_mode, exp, target,
7268 subtarget, clrsb_optab);
7269 if (target)
7270 return target;
7271 break;
7273 CASE_INT_FN (BUILT_IN_POPCOUNT):
7274 target = expand_builtin_unop (target_mode, exp, target,
7275 subtarget, popcount_optab);
7276 if (target)
7277 return target;
7278 break;
7280 CASE_INT_FN (BUILT_IN_PARITY):
7281 target = expand_builtin_unop (target_mode, exp, target,
7282 subtarget, parity_optab);
7283 if (target)
7284 return target;
7285 break;
7287 case BUILT_IN_STRLEN:
7288 target = expand_builtin_strlen (exp, target, target_mode);
7289 if (target)
7290 return target;
7291 break;
7293 case BUILT_IN_STRNLEN:
7294 target = expand_builtin_strnlen (exp, target, target_mode);
7295 if (target)
7296 return target;
7297 break;
7299 case BUILT_IN_STRCAT:
7300 target = expand_builtin_strcat (exp, target);
7301 if (target)
7302 return target;
7303 break;
7305 case BUILT_IN_STRCPY:
7306 target = expand_builtin_strcpy (exp, target);
7307 if (target)
7308 return target;
7309 break;
7311 case BUILT_IN_STRNCAT:
7312 target = expand_builtin_strncat (exp, target);
7313 if (target)
7314 return target;
7315 break;
7317 case BUILT_IN_STRNCPY:
7318 target = expand_builtin_strncpy (exp, target);
7319 if (target)
7320 return target;
7321 break;
7323 case BUILT_IN_STPCPY:
7324 target = expand_builtin_stpcpy (exp, target, mode);
7325 if (target)
7326 return target;
7327 break;
7329 case BUILT_IN_STPNCPY:
7330 target = expand_builtin_stpncpy (exp, target);
7331 if (target)
7332 return target;
7333 break;
7335 case BUILT_IN_MEMCHR:
7336 target = expand_builtin_memchr (exp, target);
7337 if (target)
7338 return target;
7339 break;
7341 case BUILT_IN_MEMCPY:
7342 target = expand_builtin_memcpy (exp, target);
7343 if (target)
7344 return target;
7345 break;
7347 case BUILT_IN_MEMMOVE:
7348 target = expand_builtin_memmove (exp, target);
7349 if (target)
7350 return target;
7351 break;
7353 case BUILT_IN_MEMPCPY:
7354 target = expand_builtin_mempcpy (exp, target);
7355 if (target)
7356 return target;
7357 break;
7359 case BUILT_IN_MEMSET:
7360 target = expand_builtin_memset (exp, target, mode);
7361 if (target)
7362 return target;
7363 break;
7365 case BUILT_IN_BZERO:
7366 target = expand_builtin_bzero (exp);
7367 if (target)
7368 return target;
7369 break;
7371 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7372 back to a BUILT_IN_STRCMP. Remember to delete the third parameter
7373 when changing it to a strcmp call. */
7374 case BUILT_IN_STRCMP_EQ:
7375 target = expand_builtin_memcmp (exp, target, true);
7376 if (target)
7377 return target;
7379 /* Change this call back to a BUILT_IN_STRCMP. */
7380 TREE_OPERAND (exp, 1)
7381 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7383 /* Delete the last parameter. */
7384 unsigned int i;
7385 vec<tree, va_gc> *arg_vec;
7386 vec_alloc (arg_vec, 2);
7387 for (i = 0; i < 2; i++)
7388 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7389 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7390 /* FALLTHROUGH */
7392 case BUILT_IN_STRCMP:
7393 target = expand_builtin_strcmp (exp, target);
7394 if (target)
7395 return target;
7396 break;
7398 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7399 back to a BUILT_IN_STRNCMP. */
7400 case BUILT_IN_STRNCMP_EQ:
7401 target = expand_builtin_memcmp (exp, target, true);
7402 if (target)
7403 return target;
7405 /* Change it back to a BUILT_IN_STRNCMP. */
7406 TREE_OPERAND (exp, 1)
7407 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7408 /* FALLTHROUGH */
7410 case BUILT_IN_STRNCMP:
7411 target = expand_builtin_strncmp (exp, target, mode);
7412 if (target)
7413 return target;
7414 break;
7416 case BUILT_IN_BCMP:
7417 case BUILT_IN_MEMCMP:
7418 case BUILT_IN_MEMCMP_EQ:
7419 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7420 if (target)
7421 return target;
7422 if (fcode == BUILT_IN_MEMCMP_EQ)
7424 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7425 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7427 break;
7429 case BUILT_IN_SETJMP:
7430 /* This should have been lowered to the builtins below. */
7431 gcc_unreachable ();
7433 case BUILT_IN_SETJMP_SETUP:
7434 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7435 and the receiver label. */
7436 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7438 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7439 VOIDmode, EXPAND_NORMAL);
7440 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7441 rtx_insn *label_r = label_rtx (label);
7443 /* This is copied from the handling of non-local gotos. */
7444 expand_builtin_setjmp_setup (buf_addr, label_r);
7445 nonlocal_goto_handler_labels
7446 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7447 nonlocal_goto_handler_labels);
7448 /* ??? Do not let expand_label treat us as such since we would
7449 not want to be both on the list of non-local labels and on
7450 the list of forced labels. */
7451 FORCED_LABEL (label) = 0;
7452 return const0_rtx;
7454 break;
7456 case BUILT_IN_SETJMP_RECEIVER:
7457 /* __builtin_setjmp_receiver is passed the receiver label. */
7458 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7460 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7461 rtx_insn *label_r = label_rtx (label);
7463 expand_builtin_setjmp_receiver (label_r);
7464 return const0_rtx;
7466 break;
7468 /* __builtin_longjmp is passed a pointer to an array of five words.
7469 It's similar to the C library longjmp function but works with
7470 __builtin_setjmp above. */
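/* Intended pairing, as a sketch: the buffer is five words wide and
   the second argument must be the literal 1, which is checked below.

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   After the jump, control re-enters __builtin_setjmp, which then
   returns nonzero.  */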
7471 case BUILT_IN_LONGJMP:
7472 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7474 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7475 VOIDmode, EXPAND_NORMAL);
7476 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7478 if (value != const1_rtx)
7480 error ("%<__builtin_longjmp%> second argument must be 1");
7481 return const0_rtx;
7484 expand_builtin_longjmp (buf_addr, value);
7485 return const0_rtx;
7487 break;
7489 case BUILT_IN_NONLOCAL_GOTO:
7490 target = expand_builtin_nonlocal_goto (exp);
7491 if (target)
7492 return target;
7493 break;
7495 /* This updates the setjmp buffer that is its argument with the value
7496 of the current stack pointer. */
7497 case BUILT_IN_UPDATE_SETJMP_BUF:
7498 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7500 rtx buf_addr
7501 = expand_normal (CALL_EXPR_ARG (exp, 0));
7503 expand_builtin_update_setjmp_buf (buf_addr);
7504 return const0_rtx;
7506 break;
7508 case BUILT_IN_TRAP:
7509 expand_builtin_trap ();
7510 return const0_rtx;
7512 case BUILT_IN_UNREACHABLE:
7513 expand_builtin_unreachable ();
7514 return const0_rtx;
7516 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7517 case BUILT_IN_SIGNBITD32:
7518 case BUILT_IN_SIGNBITD64:
7519 case BUILT_IN_SIGNBITD128:
7520 target = expand_builtin_signbit (exp, target);
7521 if (target)
7522 return target;
7523 break;
7525 /* Various hooks for the DWARF 2 __throw routine. */
7526 case BUILT_IN_UNWIND_INIT:
7527 expand_builtin_unwind_init ();
7528 return const0_rtx;
7529 case BUILT_IN_DWARF_CFA:
7530 return virtual_cfa_rtx;
7531 #ifdef DWARF2_UNWIND_INFO
7532 case BUILT_IN_DWARF_SP_COLUMN:
7533 return expand_builtin_dwarf_sp_column ();
7534 case BUILT_IN_INIT_DWARF_REG_SIZES:
7535 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7536 return const0_rtx;
7537 #endif
7538 case BUILT_IN_FROB_RETURN_ADDR:
7539 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7540 case BUILT_IN_EXTRACT_RETURN_ADDR:
7541 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7542 case BUILT_IN_EH_RETURN:
7543 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7544 CALL_EXPR_ARG (exp, 1));
7545 return const0_rtx;
7546 case BUILT_IN_EH_RETURN_DATA_REGNO:
7547 return expand_builtin_eh_return_data_regno (exp);
7548 case BUILT_IN_EXTEND_POINTER:
7549 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7550 case BUILT_IN_EH_POINTER:
7551 return expand_builtin_eh_pointer (exp);
7552 case BUILT_IN_EH_FILTER:
7553 return expand_builtin_eh_filter (exp);
7554 case BUILT_IN_EH_COPY_VALUES:
7555 return expand_builtin_eh_copy_values (exp);
7557 case BUILT_IN_VA_START:
7558 return expand_builtin_va_start (exp);
7559 case BUILT_IN_VA_END:
7560 return expand_builtin_va_end (exp);
7561 case BUILT_IN_VA_COPY:
7562 return expand_builtin_va_copy (exp);
7563 case BUILT_IN_EXPECT:
7564 return expand_builtin_expect (exp, target);
7565 case BUILT_IN_ASSUME_ALIGNED:
7566 return expand_builtin_assume_aligned (exp, target);
7567 case BUILT_IN_PREFETCH:
7568 expand_builtin_prefetch (exp);
7569 return const0_rtx;
7571 case BUILT_IN_INIT_TRAMPOLINE:
7572 return expand_builtin_init_trampoline (exp, true);
7573 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7574 return expand_builtin_init_trampoline (exp, false);
7575 case BUILT_IN_ADJUST_TRAMPOLINE:
7576 return expand_builtin_adjust_trampoline (exp);
7578 case BUILT_IN_INIT_DESCRIPTOR:
7579 return expand_builtin_init_descriptor (exp);
7580 case BUILT_IN_ADJUST_DESCRIPTOR:
7581 return expand_builtin_adjust_descriptor (exp);
7583 case BUILT_IN_FORK:
7584 case BUILT_IN_EXECL:
7585 case BUILT_IN_EXECV:
7586 case BUILT_IN_EXECLP:
7587 case BUILT_IN_EXECLE:
7588 case BUILT_IN_EXECVP:
7589 case BUILT_IN_EXECVE:
7590 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7591 if (target)
7592 return target;
7593 break;
7595 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7596 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7597 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7598 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7599 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
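/* The _1/_2/_4/_8/_16 suffix gives the access size in bytes, so the
   distance from the _1 enumerator is log2 of that size, which
   get_builtin_sync_mode turns into the matching integer mode.  The
   same scheme applies to all the sync/atomic groups below.  */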
7600 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7601 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7602 if (target)
7603 return target;
7604 break;
7606 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7607 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7608 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7609 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7610 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7611 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7612 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7613 if (target)
7614 return target;
7615 break;
7617 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7618 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7619 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7620 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7621 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7622 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7623 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7624 if (target)
7625 return target;
7626 break;
7628 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7629 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7630 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7631 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7632 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7633 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7634 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7635 if (target)
7636 return target;
7637 break;
7639 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7640 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7641 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7642 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7643 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7644 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7645 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7646 if (target)
7647 return target;
7648 break;
7650 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7651 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7652 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7653 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7654 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7655 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7656 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7657 if (target)
7658 return target;
7659 break;
7661 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7662 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7663 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7664 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7665 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7666 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7667 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7668 if (target)
7669 return target;
7670 break;
7672 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7673 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7674 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7675 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7676 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7677 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7678 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7679 if (target)
7680 return target;
7681 break;
7683 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7684 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7685 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7686 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7687 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7688 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7689 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7690 if (target)
7691 return target;
7692 break;
7694 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7695 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7696 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7697 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7698 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7699 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7700 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7701 if (target)
7702 return target;
7703 break;
7705 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7706 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7707 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7708 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7709 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7710 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7711 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7712 if (target)
7713 return target;
7714 break;
7716 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7717 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7718 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7719 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7720 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7721 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7722 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7723 if (target)
7724 return target;
7725 break;
7727 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7728 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7729 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7730 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7731 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7732 if (mode == VOIDmode)
7733 mode = TYPE_MODE (boolean_type_node);
7734 if (!target || !register_operand (target, mode))
7735 target = gen_reg_rtx (mode);
7737 mode = get_builtin_sync_mode
7738 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7739 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7740 if (target)
7741 return target;
7742 break;
7744 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7745 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7746 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7747 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7748 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7749 mode = get_builtin_sync_mode
7750 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7751 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7752 if (target)
7753 return target;
7754 break;
7756 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7757 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7758 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7759 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7760 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7761 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7762 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7763 if (target)
7764 return target;
7765 break;
7767 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7768 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7769 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7770 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7771 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7772 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7773 expand_builtin_sync_lock_release (mode, exp);
7774 return const0_rtx;
7776 case BUILT_IN_SYNC_SYNCHRONIZE:
7777 expand_builtin_sync_synchronize ();
7778 return const0_rtx;
7780 case BUILT_IN_ATOMIC_EXCHANGE_1:
7781 case BUILT_IN_ATOMIC_EXCHANGE_2:
7782 case BUILT_IN_ATOMIC_EXCHANGE_4:
7783 case BUILT_IN_ATOMIC_EXCHANGE_8:
7784 case BUILT_IN_ATOMIC_EXCHANGE_16:
7785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7786 target = expand_builtin_atomic_exchange (mode, exp, target);
7787 if (target)
7788 return target;
7789 break;
7791 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7792 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7793 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7794 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7795 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7797 unsigned int nargs, z;
7798 vec<tree, va_gc> *vec;
7800 mode =
7801 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7802 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7803 if (target)
7804 return target;
7806 /* If this is turned into an external library call, the weak parameter
7807 must be dropped to match the expected parameter list. */
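/* E.g. a six-argument call (argument names are illustrative)

     __atomic_compare_exchange_4 (mem, expected, desired, weak,
                                  success_order, failure_order)

   becomes the five-argument library call with the boolean WEAK
   argument (index 3) removed.  */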
7808 nargs = call_expr_nargs (exp);
7809 vec_alloc (vec, nargs - 1);
7810 for (z = 0; z < 3; z++)
7811 vec->quick_push (CALL_EXPR_ARG (exp, z));
7812 /* Skip the boolean weak parameter. */
7813 for (z = 4; z < 6; z++)
7814 vec->quick_push (CALL_EXPR_ARG (exp, z));
7815 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7816 break;
7819 case BUILT_IN_ATOMIC_LOAD_1:
7820 case BUILT_IN_ATOMIC_LOAD_2:
7821 case BUILT_IN_ATOMIC_LOAD_4:
7822 case BUILT_IN_ATOMIC_LOAD_8:
7823 case BUILT_IN_ATOMIC_LOAD_16:
7824 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7825 target = expand_builtin_atomic_load (mode, exp, target);
7826 if (target)
7827 return target;
7828 break;
7830 case BUILT_IN_ATOMIC_STORE_1:
7831 case BUILT_IN_ATOMIC_STORE_2:
7832 case BUILT_IN_ATOMIC_STORE_4:
7833 case BUILT_IN_ATOMIC_STORE_8:
7834 case BUILT_IN_ATOMIC_STORE_16:
7835 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7836 target = expand_builtin_atomic_store (mode, exp);
7837 if (target)
7838 return const0_rtx;
7839 break;
7841 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7842 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7843 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7844 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7845 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7847 enum built_in_function lib;
7848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7849 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7850 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7851 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7852 ignore, lib);
7853 if (target)
7854 return target;
7855 break;
7857 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7858 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7859 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7860 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7861 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7863 enum built_in_function lib;
7864 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7865 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7866 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7867 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7868 ignore, lib);
7869 if (target)
7870 return target;
7871 break;
7873 case BUILT_IN_ATOMIC_AND_FETCH_1:
7874 case BUILT_IN_ATOMIC_AND_FETCH_2:
7875 case BUILT_IN_ATOMIC_AND_FETCH_4:
7876 case BUILT_IN_ATOMIC_AND_FETCH_8:
7877 case BUILT_IN_ATOMIC_AND_FETCH_16:
7879 enum built_in_function lib;
7880 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7881 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7882 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7883 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7884 ignore, lib);
7885 if (target)
7886 return target;
7887 break;
7889 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7890 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7891 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7892 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7893 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7895 enum built_in_function lib;
7896 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7897 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7898 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7899 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7900 ignore, lib);
7901 if (target)
7902 return target;
7903 break;
7905 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7906 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7907 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7908 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7909 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7911 enum built_in_function lib;
7912 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7913 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7914 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7915 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7916 ignore, lib);
7917 if (target)
7918 return target;
7919 break;
7921 case BUILT_IN_ATOMIC_OR_FETCH_1:
7922 case BUILT_IN_ATOMIC_OR_FETCH_2:
7923 case BUILT_IN_ATOMIC_OR_FETCH_4:
7924 case BUILT_IN_ATOMIC_OR_FETCH_8:
7925 case BUILT_IN_ATOMIC_OR_FETCH_16:
7927 enum built_in_function lib;
7928 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7929 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7930 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7931 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7932 ignore, lib);
7933 if (target)
7934 return target;
7935 break;
7937 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7938 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7939 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7940 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7941 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7942 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7943 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7944 ignore, BUILT_IN_NONE);
7945 if (target)
7946 return target;
7947 break;
7949 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7950 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7951 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7952 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7953 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7954 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7955 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7956 ignore, BUILT_IN_NONE);
7957 if (target)
7958 return target;
7959 break;
7961 case BUILT_IN_ATOMIC_FETCH_AND_1:
7962 case BUILT_IN_ATOMIC_FETCH_AND_2:
7963 case BUILT_IN_ATOMIC_FETCH_AND_4:
7964 case BUILT_IN_ATOMIC_FETCH_AND_8:
7965 case BUILT_IN_ATOMIC_FETCH_AND_16:
7966 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7967 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7968 ignore, BUILT_IN_NONE);
7969 if (target)
7970 return target;
7971 break;
7973 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7974 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7975 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7976 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7977 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7979 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7980 ignore, BUILT_IN_NONE);
7981 if (target)
7982 return target;
7983 break;
7985 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7986 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7987 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7988 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7989 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7990 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7991 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7992 ignore, BUILT_IN_NONE);
7993 if (target)
7994 return target;
7995 break;
7997 case BUILT_IN_ATOMIC_FETCH_OR_1:
7998 case BUILT_IN_ATOMIC_FETCH_OR_2:
7999 case BUILT_IN_ATOMIC_FETCH_OR_4:
8000 case BUILT_IN_ATOMIC_FETCH_OR_8:
8001 case BUILT_IN_ATOMIC_FETCH_OR_16:
8002 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8003 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8004 ignore, BUILT_IN_NONE);
8005 if (target)
8006 return target;
8007 break;
8009 case BUILT_IN_ATOMIC_TEST_AND_SET:
8010 return expand_builtin_atomic_test_and_set (exp, target);
8012 case BUILT_IN_ATOMIC_CLEAR:
8013 return expand_builtin_atomic_clear (exp);
8015 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8016 return expand_builtin_atomic_always_lock_free (exp);
8018 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8019 target = expand_builtin_atomic_is_lock_free (exp);
8020 if (target)
8021 return target;
8022 break;
8024 case BUILT_IN_ATOMIC_THREAD_FENCE:
8025 expand_builtin_atomic_thread_fence (exp);
8026 return const0_rtx;
8028 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8029 expand_builtin_atomic_signal_fence (exp);
8030 return const0_rtx;
8032 case BUILT_IN_OBJECT_SIZE:
8033 return expand_builtin_object_size (exp);
8035 case BUILT_IN_MEMCPY_CHK:
8036 case BUILT_IN_MEMPCPY_CHK:
8037 case BUILT_IN_MEMMOVE_CHK:
8038 case BUILT_IN_MEMSET_CHK:
8039 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8040 if (target)
8041 return target;
8042 break;
8044 case BUILT_IN_STRCPY_CHK:
8045 case BUILT_IN_STPCPY_CHK:
8046 case BUILT_IN_STRNCPY_CHK:
8047 case BUILT_IN_STPNCPY_CHK:
8048 case BUILT_IN_STRCAT_CHK:
8049 case BUILT_IN_STRNCAT_CHK:
8050 case BUILT_IN_SNPRINTF_CHK:
8051 case BUILT_IN_VSNPRINTF_CHK:
8052 maybe_emit_chk_warning (exp, fcode);
8053 break;
8055 case BUILT_IN_SPRINTF_CHK:
8056 case BUILT_IN_VSPRINTF_CHK:
8057 maybe_emit_sprintf_chk_warning (exp, fcode);
8058 break;
8060 case BUILT_IN_FREE:
8061 if (warn_free_nonheap_object)
8062 maybe_emit_free_warning (exp);
8063 break;
8065 case BUILT_IN_THREAD_POINTER:
8066 return expand_builtin_thread_pointer (exp, target);
8068 case BUILT_IN_SET_THREAD_POINTER:
8069 expand_builtin_set_thread_pointer (exp);
8070 return const0_rtx;
8072 case BUILT_IN_ACC_ON_DEVICE:
8073 /* Do library call, if we failed to expand the builtin when
8074 folding. */
8075 break;
8077 case BUILT_IN_GOACC_PARLEVEL_ID:
8078 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8079 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8081 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8082 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8084 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8085 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8086 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8087 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8088 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8089 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8090 return expand_speculation_safe_value (mode, exp, target, ignore);
8092 default: /* just do library call, if unknown builtin */
8093 break;
8096 /* The switch statement above can drop through to cause the function
8097 to be called normally. */
8098 return expand_call (exp, target, ignore);
8101 /* Determine whether a tree node represents a call to a built-in
8102 function. If the tree T is a call to a built-in function with
8103 the right number of arguments of the appropriate types, return
8104 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8105 Otherwise the return value is END_BUILTINS. */
8107 enum built_in_function
8108 builtin_mathfn_code (const_tree t)
8110 const_tree fndecl, arg, parmlist;
8111 const_tree argtype, parmtype;
8112 const_call_expr_arg_iterator iter;
8114 if (TREE_CODE (t) != CALL_EXPR)
8115 return END_BUILTINS;
8117 fndecl = get_callee_fndecl (t);
8118 if (fndecl == NULL_TREE
8119 || TREE_CODE (fndecl) != FUNCTION_DECL
8120 || ! DECL_BUILT_IN (fndecl)
8121 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8122 return END_BUILTINS;
8124 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8125 init_const_call_expr_arg_iterator (t, &iter);
8126 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8128 /* If a function doesn't take a variable number of arguments,
8129 the last element in the list will have type `void'. */
8130 parmtype = TREE_VALUE (parmlist);
8131 if (VOID_TYPE_P (parmtype))
8133 if (more_const_call_expr_args_p (&iter))
8134 return END_BUILTINS;
8135 return DECL_FUNCTION_CODE (fndecl);
8138 if (! more_const_call_expr_args_p (&iter))
8139 return END_BUILTINS;
8141 arg = next_const_call_expr_arg (&iter);
8142 argtype = TREE_TYPE (arg);
8144 if (SCALAR_FLOAT_TYPE_P (parmtype))
8146 if (! SCALAR_FLOAT_TYPE_P (argtype))
8147 return END_BUILTINS;
8149 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8151 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8152 return END_BUILTINS;
8154 else if (POINTER_TYPE_P (parmtype))
8156 if (! POINTER_TYPE_P (argtype))
8157 return END_BUILTINS;
8159 else if (INTEGRAL_TYPE_P (parmtype))
8161 if (! INTEGRAL_TYPE_P (argtype))
8162 return END_BUILTINS;
8164 else
8165 return END_BUILTINS;
8168 /* Variable-length argument list. */
8169 return DECL_FUNCTION_CODE (fndecl);
8172 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8173 evaluate to a constant. */
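/* E.g. __builtin_constant_p (42) and __builtin_constant_p ("abc")
   fold to 1 here, whereas for an arbitrary variable X the call is
   left alone, unless no further optimization will run, in which case
   it folds to 0.  */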
8175 static tree
8176 fold_builtin_constant_p (tree arg)
8178 /* We return 1 for a numeric type that's known to be a constant
8179 value at compile-time or for an aggregate type that's a
8180 literal constant. */
8181 STRIP_NOPS (arg);
8183 /* If we know this is a constant, return the constant one. */
8184 if (CONSTANT_CLASS_P (arg)
8185 || (TREE_CODE (arg) == CONSTRUCTOR
8186 && TREE_CONSTANT (arg)))
8187 return integer_one_node;
8188 if (TREE_CODE (arg) == ADDR_EXPR)
8190 tree op = TREE_OPERAND (arg, 0);
8191 if (TREE_CODE (op) == STRING_CST
8192 || (TREE_CODE (op) == ARRAY_REF
8193 && integer_zerop (TREE_OPERAND (op, 1))
8194 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8195 return integer_one_node;
8198 /* If this expression has side effects, show we don't know it to be a
8199 constant. Likewise if it's a pointer or aggregate type since in
8200 those cases we only want literals, since those are only optimized
8201 when generating RTL, not later.
8202 And finally, if we are compiling an initializer, not code, we
8203 need to return a definite result now; there's not going to be any
8204 more optimization done. */
8205 if (TREE_SIDE_EFFECTS (arg)
8206 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8207 || POINTER_TYPE_P (TREE_TYPE (arg))
8208 || cfun == 0
8209 || folding_initializer
8210 || force_folding_builtin_constant_p)
8211 return integer_zero_node;
8213 return NULL_TREE;
8216 /* Create builtin_expect with PRED and EXPECTED as its arguments and
8217 return it as a truthvalue. */
8219 static tree
8220 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8221 tree predictor)
8223 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8225 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8226 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8227 ret_type = TREE_TYPE (TREE_TYPE (fn));
8228 pred_type = TREE_VALUE (arg_types);
8229 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8231 pred = fold_convert_loc (loc, pred_type, pred);
8232 expected = fold_convert_loc (loc, expected_type, expected);
8233 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8234 predictor);
8236 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8237 build_int_cst (ret_type, 0));
8240 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and the
8241 optional predictor ARG2. Return NULL_TREE if no simplification is possible. */
8243 tree
8244 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8246 tree inner, fndecl, inner_arg0;
8247 enum tree_code code;
8249 /* Distribute the expected value over short-circuiting operators.
8250 See through the cast from truthvalue_type_node to long. */
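/* As a sketch of the rewrite built below:

     __builtin_expect (a && b, 1)
       => (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit carries its own hint.  */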
8251 inner_arg0 = arg0;
8252 while (CONVERT_EXPR_P (inner_arg0)
8253 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8254 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8255 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8257 /* If this is a builtin_expect within a builtin_expect, keep the
8258 inner one. See through a comparison against a constant. It
8259 might have been added to create a truthvalue. */
8260 inner = inner_arg0;
8262 if (COMPARISON_CLASS_P (inner)
8263 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8264 inner = TREE_OPERAND (inner, 0);
8266 if (TREE_CODE (inner) == CALL_EXPR
8267 && (fndecl = get_callee_fndecl (inner))
8268 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8269 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8270 return arg0;
8272 inner = inner_arg0;
8273 code = TREE_CODE (inner);
8274 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8276 tree op0 = TREE_OPERAND (inner, 0);
8277 tree op1 = TREE_OPERAND (inner, 1);
8278 arg1 = save_expr (arg1);
8280 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8281 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8282 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8284 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8287 /* If the argument isn't invariant then there's nothing else we can do. */
8288 if (!TREE_CONSTANT (inner_arg0))
8289 return NULL_TREE;
8291 /* If we expect that a comparison against the argument will fold to
8292 a constant, return the constant. In practice, this means a true
8293 constant or the address of a non-weak symbol. */
8294 inner = inner_arg0;
8295 STRIP_NOPS (inner);
8296 if (TREE_CODE (inner) == ADDR_EXPR)
8300 inner = TREE_OPERAND (inner, 0);
8302 while (TREE_CODE (inner) == COMPONENT_REF
8303 || TREE_CODE (inner) == ARRAY_REF);
8304 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8305 return NULL_TREE;
8308 /* Otherwise, ARG0 already has the proper type for the return value. */
8309 return arg0;
8312 /* Fold a call to __builtin_classify_type with argument ARG. */
8314 static tree
8315 fold_builtin_classify_type (tree arg)
8317 if (arg == 0)
8318 return build_int_cst (integer_type_node, no_type_class);
8320 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8323 /* Fold a call to __builtin_strlen with argument ARG. */
8325 static tree
8326 fold_builtin_strlen (location_t loc, tree type, tree arg)
8328 if (!validate_arg (arg, POINTER_TYPE))
8329 return NULL_TREE;
8330 else
8332 tree len = c_strlen (arg, 0);
8334 if (len)
8335 return fold_convert_loc (loc, type, len);
8337 return NULL_TREE;
8341 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8343 static tree
8344 fold_builtin_inf (location_t loc, tree type, int warn)
8346 REAL_VALUE_TYPE real;
8348 /* __builtin_inff is intended to be usable to define INFINITY on all
8349 targets. If an infinity is not available, INFINITY expands "to a
8350 positive constant of type float that overflows at translation
8351 time", footnote "In this case, using INFINITY will violate the
8352 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8353 Thus we pedwarn to ensure this constraint violation is
8354 diagnosed. */
8355 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8356 pedwarn (loc, 0, "target format does not support infinity");
8358 real_inf (&real);
8359 return build_real (type, real);
8362 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8363 NULL_TREE if no simplification can be made. */
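/* The transformation built below is, in effect (__c stands for a
   compiler temporary and is shown only for illustration):

     sincos (x, &s, &c)  =>  __c = cexpi (x),
                             s = __imag__ __c,
                             c = __real__ __c;

   For a constant X the cexpi value is instead computed at compile
   time.  */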
8365 static tree
8366 fold_builtin_sincos (location_t loc,
8367 tree arg0, tree arg1, tree arg2)
8369 tree type;
8370 tree fndecl, call = NULL_TREE;
8372 if (!validate_arg (arg0, REAL_TYPE)
8373 || !validate_arg (arg1, POINTER_TYPE)
8374 || !validate_arg (arg2, POINTER_TYPE))
8375 return NULL_TREE;
8377 type = TREE_TYPE (arg0);
8379 /* Calculate the result when the argument is a constant. */
8380 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8381 if (fn == END_BUILTINS)
8382 return NULL_TREE;
8384 /* Canonicalize sincos to cexpi. */
8385 if (TREE_CODE (arg0) == REAL_CST)
8387 tree complex_type = build_complex_type (type);
8388 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8390 if (!call)
8392 if (!targetm.libc_has_function (function_c99_math_complex)
8393 || !builtin_decl_implicit_p (fn))
8394 return NULL_TREE;
8395 fndecl = builtin_decl_explicit (fn);
8396 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8397 call = builtin_save_expr (call);
8400 tree ptype = build_pointer_type (type);
8401 arg1 = fold_convert (ptype, arg1);
8402 arg2 = fold_convert (ptype, arg2);
8403 return build2 (COMPOUND_EXPR, void_type_node,
8404 build2 (MODIFY_EXPR, void_type_node,
8405 build_fold_indirect_ref_loc (loc, arg1),
8406 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8407 build2 (MODIFY_EXPR, void_type_node,
8408 build_fold_indirect_ref_loc (loc, arg2),
8409 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8412 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8413 Return NULL_TREE if no simplification can be made. */
8415 static tree
8416 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8418 if (!validate_arg (arg1, POINTER_TYPE)
8419 || !validate_arg (arg2, POINTER_TYPE)
8420 || !validate_arg (len, INTEGER_TYPE))
8421 return NULL_TREE;
8423 /* If the LEN parameter is zero, return zero. */
8424 if (integer_zerop (len))
8425 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8426 arg1, arg2);
8428 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8429 if (operand_equal_p (arg1, arg2, 0))
8430 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8432 /* If the LEN parameter is one, return an expression corresponding to
8433 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8434 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8436 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8437 tree cst_uchar_ptr_node
8438 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8440 tree ind1
8441 = fold_convert_loc (loc, integer_type_node,
8442 build1 (INDIRECT_REF, cst_uchar_node,
8443 fold_convert_loc (loc,
8444 cst_uchar_ptr_node,
8445 arg1)));
8446 tree ind2
8447 = fold_convert_loc (loc, integer_type_node,
8448 build1 (INDIRECT_REF, cst_uchar_node,
8449 fold_convert_loc (loc,
8450 cst_uchar_ptr_node,
8451 arg2)));
8452 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8455 return NULL_TREE;
8458 /* Fold a call to builtin isascii with argument ARG. */
8460 static tree
8461 fold_builtin_isascii (location_t loc, tree arg)
8463 if (!validate_arg (arg, INTEGER_TYPE))
8464 return NULL_TREE;
8465 else
8467 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8468 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8469 build_int_cst (integer_type_node,
8470 ~ (unsigned HOST_WIDE_INT) 0x7f));
8471 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8472 arg, integer_zero_node);
8476 /* Fold a call to builtin toascii with argument ARG. */
8478 static tree
8479 fold_builtin_toascii (location_t loc, tree arg)
8481 if (!validate_arg (arg, INTEGER_TYPE))
8482 return NULL_TREE;
8484 /* Transform toascii(c) -> (c & 0x7f). */
8485 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8486 build_int_cst (integer_type_node, 0x7f));
8489 /* Fold a call to builtin isdigit with argument ARG. */
8491 static tree
8492 fold_builtin_isdigit (location_t loc, tree arg)
8494 if (!validate_arg (arg, INTEGER_TYPE))
8495 return NULL_TREE;
8496 else
8498 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8499 /* According to the C standard, isdigit is unaffected by locale.
8500 However, it definitely is affected by the target character set. */
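/* The unsigned subtraction is what makes one comparison suffice: for
   C below '0' the result wraps around to a large value, so both "too
   small" and "too large" inputs fail the <= 9 test.  */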
8501 unsigned HOST_WIDE_INT target_digit0
8502 = lang_hooks.to_target_charset ('0');
8504 if (target_digit0 == 0)
8505 return NULL_TREE;
8507 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8508 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8509 build_int_cst (unsigned_type_node, target_digit0));
8510 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8511 build_int_cst (unsigned_type_node, 9));
8515 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8517 static tree
8518 fold_builtin_fabs (location_t loc, tree arg, tree type)
8520 if (!validate_arg (arg, REAL_TYPE))
8521 return NULL_TREE;
8523 arg = fold_convert_loc (loc, type, arg);
8524 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8527 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8529 static tree
8530 fold_builtin_abs (location_t loc, tree arg, tree type)
8532 if (!validate_arg (arg, INTEGER_TYPE))
8533 return NULL_TREE;
8535 arg = fold_convert_loc (loc, type, arg);
8536 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8539 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8541 static tree
8542 fold_builtin_carg (location_t loc, tree arg, tree type)
8544 if (validate_arg (arg, COMPLEX_TYPE)
8545 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8547 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8549 if (atan2_fn)
8551 tree new_arg = builtin_save_expr (arg);
8552 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8553 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8554 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8558 return NULL_TREE;
8561 /* Fold a call to builtin frexp; we can assume the base is 2. */
8563 static tree
8564 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8566 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8567 return NULL_TREE;
8569 STRIP_NOPS (arg0);
8571 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8572 return NULL_TREE;
8574 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8576 /* Proceed if a valid pointer type was passed in. */
8577 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8579 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8580 tree frac, exp;
8582 switch (value->cl)
8584 case rvc_zero:
8585 /* For +-0, return (*exp = 0, +-0). */
8586 exp = integer_zero_node;
8587 frac = arg0;
8588 break;
8589 case rvc_nan:
8590 case rvc_inf:
8591 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8592 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8593 case rvc_normal:
8595 /* Since the frexp function always expects base 2, and in
8596 GCC normalized significands are already in the range
8597 [0.5, 1.0), we have exactly what frexp wants. */
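/* Worked example: for ARG0 == 8.0 the stored significand is 0.5 with
   a REAL_EXP of 4, and 0.5 * 2**4 == 8.0, exactly the (frac, *exp)
   pair frexp must produce.  */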
8598 REAL_VALUE_TYPE frac_rvt = *value;
8599 SET_REAL_EXP (&frac_rvt, 0);
8600 frac = build_real (rettype, frac_rvt);
8601 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8603 break;
8604 default:
8605 gcc_unreachable ();
8608 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8609 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8610 TREE_SIDE_EFFECTS (arg1) = 1;
8611 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8614 return NULL_TREE;
8617 /* Fold a call to builtin modf. */
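/* E.g. modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5), while
   modf (-3.0, &iptr) folds to (*iptr = -3.0, -0.0), keeping the sign
   on the zero fractional part.  */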
8619 static tree
8620 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8622 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8623 return NULL_TREE;
8625 STRIP_NOPS (arg0);
8627 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8628 return NULL_TREE;
8630 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8632 /* Proceed if a valid pointer type was passed in. */
8633 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8635 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8636 REAL_VALUE_TYPE trunc, frac;
8638 switch (value->cl)
8640 case rvc_nan:
8641 case rvc_zero:
8642 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8643 trunc = frac = *value;
8644 break;
8645 case rvc_inf:
8646 /* For +-Inf, return (*arg1 = arg0, +-0). */
8647 frac = dconst0;
8648 frac.sign = value->sign;
8649 trunc = *value;
8650 break;
8651 case rvc_normal:
8652 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8653 real_trunc (&trunc, VOIDmode, value);
8654 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8655 /* If the original number was negative and already
8656 integral, then the fractional part is -0.0. */
8657 if (value->sign && frac.cl == rvc_zero)
8658 frac.sign = value->sign;
8659 break;
8662 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8663 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8664 build_real (rettype, trunc));
8665 TREE_SIDE_EFFECTS (arg1) = 1;
8666 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8667 build_real (rettype, frac));
8670 return NULL_TREE;
8673 /* Given a location LOC, an interclass builtin function decl FNDECL
8674 and its single argument ARG, return a folded expression computing
8675 the same, or NULL_TREE if we either couldn't or didn't want to fold
8676 (the latter happens if there's an RTL instruction available). */
8678 static tree
8679 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8681 machine_mode mode;
8683 if (!validate_arg (arg, REAL_TYPE))
8684 return NULL_TREE;
8686 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8687 return NULL_TREE;
8689 mode = TYPE_MODE (TREE_TYPE (arg));
8691 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8693 /* If there is no optab, try generic code. */
8694 switch (DECL_FUNCTION_CODE (fndecl))
8696 tree result;
8698 CASE_FLT_FN (BUILT_IN_ISINF):
8700 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8701 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8702 tree type = TREE_TYPE (arg);
8703 REAL_VALUE_TYPE r;
8704 char buf[128];
8706 if (is_ibm_extended)
8708 /* NaN and Inf are encoded in the high-order double value
8709 only. The low-order value is not significant. */
8710 type = double_type_node;
8711 mode = DFmode;
8712 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8714 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8715 real_from_string (&r, buf);
8716 result = build_call_expr (isgr_fn, 2,
8717 fold_build1_loc (loc, ABS_EXPR, type, arg),
8718 build_real (type, r));
8719 return result;
8721 CASE_FLT_FN (BUILT_IN_FINITE):
8722 case BUILT_IN_ISFINITE:
8724 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8725 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8726 tree type = TREE_TYPE (arg);
8727 REAL_VALUE_TYPE r;
8728 char buf[128];
8730 if (is_ibm_extended)
8732 /* NaN and Inf are encoded in the high-order double value
8733 only. The low-order value is not significant. */
8734 type = double_type_node;
8735 mode = DFmode;
8736 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8738 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8739 real_from_string (&r, buf);
8740 result = build_call_expr (isle_fn, 2,
8741 fold_build1_loc (loc, ABS_EXPR, type, arg),
8742 build_real (type, r));
8743 /*result = fold_build2_loc (loc, UNGT_EXPR,
8744 TREE_TYPE (TREE_TYPE (fndecl)),
8745 fold_build1_loc (loc, ABS_EXPR, type, arg),
8746 build_real (type, r));
8747 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8748 TREE_TYPE (TREE_TYPE (fndecl)),
8749 result);*/
8750 return result;
8752 case BUILT_IN_ISNORMAL:
8754 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8755 islessequal(fabs(x),DBL_MAX). */
8756 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8757 tree type = TREE_TYPE (arg);
8758 tree orig_arg, max_exp, min_exp;
8759 machine_mode orig_mode = mode;
8760 REAL_VALUE_TYPE rmax, rmin;
8761 char buf[128];
8763 orig_arg = arg = builtin_save_expr (arg);
8764 if (is_ibm_extended)
8766 /* Use double to test the normal range of IBM extended
8767 precision. Emin for IBM extended precision is
8768 different to emin for IEEE double, being 53 higher
8769 since the low double exponent is at least 53 lower
8770 than the high double exponent. */
8771 type = double_type_node;
8772 mode = DFmode;
8773 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8775 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8777 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8778 real_from_string (&rmax, buf);
8779 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8780 real_from_string (&rmin, buf);
8781 max_exp = build_real (type, rmax);
8782 min_exp = build_real (type, rmin);
8784 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8785 if (is_ibm_extended)
8787 /* Testing the high end of the range is done just using
8788 the high double, using the same test as isfinite().
8789 For the subnormal end of the range we first test the
8790 high double, then if its magnitude is equal to the
8791 limit of 0x1p-969, we test whether the low double is
8792 non-zero and opposite sign to the high double. */
8793 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8794 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8795 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8796 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8797 arg, min_exp);
8798 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8799 complex_double_type_node, orig_arg);
8800 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8801 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8802 tree zero = build_real (type, dconst0);
8803 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8804 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8805 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8806 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8807 fold_build3 (COND_EXPR,
8808 integer_type_node,
8809 hilt, logt, lolt));
8810 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8811 eq_min, ok_lo);
8812 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8813 gt_min, eq_min);
8815 else
8817 tree const isge_fn
8818 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8819 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8821 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8822 max_exp, min_exp);
8823 return result;
8825 default:
8826 break;
8829 return NULL_TREE;
8832 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8833 ARG is the argument for the call. */
8835 static tree
8836 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8838 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8840 if (!validate_arg (arg, REAL_TYPE))
8841 return NULL_TREE;
8843 switch (builtin_index)
8845 case BUILT_IN_ISINF:
8846 if (!HONOR_INFINITIES (arg))
8847 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8849 return NULL_TREE;
8851 case BUILT_IN_ISINF_SIGN:
8853 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8854 /* In a boolean context, GCC will fold the inner COND_EXPR to
8855 1. So e.g. "if (isinf_sign(x))" would be folded to just
8856 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8857 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8858 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8859 tree tmp = NULL_TREE;
8861 arg = builtin_save_expr (arg);
8863 if (signbit_fn && isinf_fn)
8865 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8866 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8868 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8869 signbit_call, integer_zero_node);
8870 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8871 isinf_call, integer_zero_node);
8873 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8874 integer_minus_one_node, integer_one_node);
8875 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8876 isinf_call, tmp,
8877 integer_zero_node);
8880 return tmp;
8883 case BUILT_IN_ISFINITE:
8884 if (!HONOR_NANS (arg)
8885 && !HONOR_INFINITIES (arg))
8886 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8888 return NULL_TREE;
8890 case BUILT_IN_ISNAN:
8891 if (!HONOR_NANS (arg))
8892 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8895 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8896 if (is_ibm_extended)
8898 /* NaN and Inf are encoded in the high-order double value
8899 only. The low-order value is not significant. */
8900 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8903 arg = builtin_save_expr (arg);
8904 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8906 default:
8907 gcc_unreachable ();
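/* Illustrative note (editorial, not from the original source): given
   the folding above, on a target that honors NaNs a call such as

     __builtin_isnan (x)

   reduces to the self-comparison UNORDERED_EXPR <x, x>, which is true
   exactly when X is a NaN, while -ffinite-math-only folds the call to
   the constant 0 (builtin_save_expr guards against evaluating X's
   side-effects twice).  */
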
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
     isnan(x) ? FP_NAN :
       (fabs(x) == Inf ? FP_INFINITE :
        (fabs(x) >= DBL_MIN ? FP_NORMAL :
         (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                         build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
                         tmp, fp_zero, fp_subnormal);

  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
                         arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
                             build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
                             fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}

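/* Illustrative note (editorial, not from the original source): for
   IEEE double, REAL_MODE_FORMAT (mode)->emin - 1 is -1022, so the
   normal-range test built above compares fabs(x) >= 0x1p-1022, i.e.
   DBL_MIN.  Because the COND_EXPR chain is built innermost-first, the
   resulting tree reads roughly

     !ordered (x, x) ? FP_NAN
     : fabs (x) == Inf ? FP_INFINITE
     : fabs (x) >= 0x1p-1022 ? FP_NORMAL
     : x == 0 ? FP_ZERO : FP_SUBNORMAL  */
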
/* Fold a call to an unordered comparison function such as
   __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
   being called and ARG0 and ARG1 are the arguments for the call.
   UNORDERED_CODE and ORDERED_CODE are comparison codes that give
   the opposite of the desired result.  UNORDERED_CODE is used
   for modes that can hold NaNs and ORDERED_CODE is used for
   the rest.  */

static tree
fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
                            enum tree_code unordered_code,
                            enum tree_code ordered_code)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum tree_code code;
  tree type0, type1;
  enum tree_code code0, code1;
  tree cmp_type = NULL_TREE;

  type0 = TREE_TYPE (arg0);
  type1 = TREE_TYPE (arg1);

  code0 = TREE_CODE (type0);
  code1 = TREE_CODE (type1);

  if (code0 == REAL_TYPE && code1 == REAL_TYPE)
    /* Choose the wider of two real types.  */
    cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
      ? type0 : type1;
  else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
    cmp_type = type0;
  else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
    cmp_type = type1;

  arg0 = fold_convert_loc (loc, cmp_type, arg0);
  arg1 = fold_convert_loc (loc, cmp_type, arg1);

  if (unordered_code == UNORDERED_EXPR)
    {
      if (!HONOR_NANS (arg0))
        return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
    }

  code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
  return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
                          fold_build2_loc (loc, code, type, arg0, arg1));
}

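/* Illustrative note (editorial, not from the original source):
   fold_builtin_2 below calls this with (UNLE_EXPR, LE_EXPR) for
   __builtin_isgreater, so when NaNs are honored

     isgreater (x, y)  ->  !(x UNLE y)

   which, unlike a raw x > y, raises no invalid-operation exception on
   quiet NaN operands; with -ffinite-math-only it degenerates to the
   ordinary !(x <= y).  */
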
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetic if it can never overflow, or into internal functions that
   return both the result of the arithmetic and an overflowed boolean
   flag in a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
                             tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the type-generic
     built-in, or ERROR_MARK for the type-specific ones.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
      opcode = PLUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
      opcode = MINUS_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
      opcode = MULT_EXPR;
      /* FALLTHRU */
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
                                 arith_overflowed_p (opcode, type, arg0, arg1)
                                 ? boolean_true_node : boolean_false_node,
                                 arg2);

  tree ctype = build_complex_type (type);
  tree call = build_call_expr_internal_loc (loc, ifn, ctype,
                                            2, arg0, arg1);
  tree tgt = save_expr (call);
  tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
  tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
  ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}

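/* Illustrative note (editorial, not from the original source): the
   folding above turns

     __builtin_add_overflow (a, b, &r)

   into roughly

     c = .ADD_OVERFLOW (a, b);   /+ internal call, complex-int result +/
     *(&r) = REALPART_EXPR <c>, (_Bool) IMAGPART_EXPR <c>;

   while __builtin_add_overflow_p (a, b, (T) 0) keeps only the
   IMAGPART_EXPR part, and two constant operands fold straight to true
   or false via arith_overflowed_p.  */
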
/* Fold a call to __builtin_FILE to a constant string.  */

static inline tree
fold_builtin_FILE (location_t loc)
{
  if (const char *fname = LOCATION_FILE (loc))
    {
      /* The documentation says this builtin is equivalent to the preprocessor
         __FILE__ macro so it appears appropriate to use the same file prefix
         mappings.  */
      fname = remap_macro_filename (fname);
      return build_string_literal (strlen (fname) + 1, fname);
    }

  return build_string_literal (1, "");
}

/* Fold a call to __builtin_FUNCTION to a constant string.  */

static inline tree
fold_builtin_FUNCTION ()
{
  const char *name = "";

  if (current_function_decl)
    name = lang_hooks.decl_printable_name (current_function_decl, 0);

  return build_string_literal (strlen (name) + 1, name);
}

/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  return build_int_cst (type, LOCATION_LINE (loc));
}

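/* Illustrative note (editorial, not from the original source): the
   three folds above are what make call-site-capturing defaults work,
   e.g. in C++

     void log_at (const char *file = __builtin_FILE (),
                  int line = __builtin_LINE ());

   where each call site folds the defaults to a string literal and an
   integer constant for that location.  */
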
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
        tree val = fold_builtin_constant_p (arg0);

        /* Gimplification will pull the CALL_EXPR for the builtin out of
           an if condition.  When not optimizing, we'll not CSE it back.
           To avoid link-error style regressions, return false now.  */
        if (!val && !optimize)
          val = integer_zero_node;

        return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
          && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
        return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
        tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
        if (ret)
          return ret;
        return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      if (integer_zerop (arg0))
        return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, POINTER_TYPE))
        return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
                                         arg0, arg1, UNORDERED_EXPR,
                                         NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
                tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
                                  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
          && validate_arg (arg1, REAL_TYPE)
          && validate_arg (arg2, POINTER_TYPE))
        return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2);

    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}

/* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
   arguments.  The trailing bool argument (historically IGNORE, true if
   the result of the call is ignored) is currently unused.  This
   function returns NULL_TREE if no simplification was possible.  */

tree
fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
{
  tree ret = NULL_TREE;

  switch (nargs)
    {
    case 0:
      ret = fold_builtin_0 (loc, fndecl);
      break;
    case 1:
      ret = fold_builtin_1 (loc, fndecl, args[0]);
      break;
    case 2:
      ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
      break;
    case 3:
      ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
      break;
    default:
      ret = fold_builtin_varargs (loc, fndecl, args, nargs);
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      return ret;
    }
  return NULL_TREE;
}

/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
   list ARGS along with N new arguments in NEWARGS.  SKIP is the number
   of arguments in ARGS to be omitted.  OLDNARGS is the number of
   elements in ARGS.  */

static tree
rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
                          int skip, tree fndecl, int n, va_list newargs)
{
  int nargs = oldnargs - skip + n;
  tree *buffer;

  if (n > 0)
    {
      int i, j;

      buffer = XALLOCAVEC (tree, nargs);
      for (i = 0; i < n; i++)
        buffer[i] = va_arg (newargs, tree);
      for (j = skip; j < oldnargs; j++, i++)
        buffer[i] = args[j];
    }
  else
    buffer = args + skip;

  return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
}

/* Return true if FNDECL shouldn't be folded right now.
   If a built-in function has an inline attribute always_inline
   wrapper, defer folding it until after always_inline functions have
   been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
   might not be performed.  */

bool
avoid_folding_inline_builtin (tree fndecl)
{
  return (DECL_DECLARED_INLINE_P (fndecl)
          && DECL_DISREGARD_INLINE_LIMITS (fndecl)
          && cfun
          && !cfun->always_inline_functions_inlined
          && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
}

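/* Illustrative note (editorial, not from the original source): the
   always_inline wrappers in question look roughly like glibc's
   -D_FORTIFY_SOURCE headers, e.g.

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n,
                                      __builtin_object_size (d, 0));
     }

   Folding the outer memcpy call before this wrapper is inlined would
   silently drop the _chk checking.  */
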
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
         yet.  Defer folding until we see all the arguments
         (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
         instead the last argument is __builtin_va_arg_pack ().  Defer
         folding even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
                                     CALL_EXPR_ARGP (exp), ignore);
      else
        {
          tree *args = CALL_EXPR_ARGP (exp);
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            return ret;
        }
    }
  return NULL_TREE;
}

/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
                         tree fn,
                         int n,
                         tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If the last argument is __builtin_va_arg_pack (), arguments to this
         function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
        {
          tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
          if (fndecl2
              && TREE_CODE (fndecl2) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
              && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
            return NULL_TREE;
        }
      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        return targetm.fold_builtin (fndecl, n, argarray, false);
      else
        return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}

/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
                                CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}

/* Validate a single argument ARG against a tree code CODE representing
   a type.  Return true when the argument is valid.  */

static bool
validate_arg (const_tree arg, enum tree_code code)
{
  if (!arg)
    return false;
  else if (code == POINTER_TYPE)
    return POINTER_TYPE_P (TREE_TYPE (arg));
  else if (code == INTEGER_TYPE)
    return INTEGRAL_TYPE_P (TREE_TYPE (arg));
  return code == TREE_CODE (TREE_TYPE (arg));
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = (i == gimple_call_num_args (call));
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = gimple_call_arg (call, i++);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

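/* Illustrative note (editorial, not from the original source): a
   typical caller checks a gimple call's signature like

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   where the trailing VOID_TYPE terminates the specifier list; a
   trailing 0 would instead accept any further arguments.  */
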
/* Default target-specific builtin expander that does nothing.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
                        rtx target ATTRIBUTE_UNUSED,
                        rtx subtarget ATTRIBUTE_UNUSED,
                        machine_mode mode ATTRIBUTE_UNUSED,
                        int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}

/* Returns true if EXP represents data that would potentially reside
   in a readonly section.  */

bool
readonly_data_expr (tree exp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) != ADDR_EXPR)
    return false;

  exp = get_base_address (TREE_OPERAND (exp, 0));
  if (!exp)
    return false;

  /* Make sure we call decl_readonly_section only for trees it
     can handle (since it returns true for everything it doesn't
     understand).  */
  if (TREE_CODE (exp) == STRING_CST
      || TREE_CODE (exp) == CONSTRUCTOR
      || (VAR_P (exp) && TREE_STATIC (exp)))
    return decl_readonly_section (exp, 0);
  else
    return false;
}

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
        return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
        {
          const char *r = strpbrk (p1, p2);
          tree tem;

          if (r == NULL)
            return build_int_cst (TREE_TYPE (s1), 0);

          /* Return an offset into the constant string argument.  */
          tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
          return fold_convert_loc (loc, type, tem);
        }

      if (p2[0] == '\0')
        /* strpbrk(x, "") == NULL.
           Evaluate and ignore s1 in case it had side-effects.  */
        return omit_one_operand_loc (loc, type, integer_zero_node, s1);

      if (p2[1] != '\0')
        return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
        return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
         strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
                                  build_int_cst (integer_type_node, p2[0]));
    }
}

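/* Illustrative note (editorial, not from the original source): the
   folds above give, for instance,

     strpbrk (s, "a")        ->  strchr (s, 'a')
     strpbrk ("hello", "lo") ->  "hello" + 2      (constant-folded)
     strpbrk (s, "")         ->  NULL             (s still evaluated)  */
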
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", the result is 0.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
        /* Evaluate and ignore both arguments in case either one has
           side-effects.  */
        return omit_two_operands_loc (loc, size_type_node, size_zero_node,
                                      s1, s2);
      return NULL_TREE;
    }
}

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", the result is 0.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
        {
          /* Evaluate and ignore argument s2 in case it has
             side-effects.  */
          return omit_one_operand_loc (loc, size_type_node,
                                       size_zero_node, s2);
        }

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
        {
          tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

          /* If the replacement _DECL isn't initialized, don't do the
             transformation.  */
          if (!fn)
            return NULL_TREE;

          return build_call_expr_loc (loc, fn, 1, s1);
        }
      return NULL_TREE;
    }
}

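/* Illustrative note (editorial, not from the original source): the
   folds above give

     strcspn (s, "")  ->  strlen (s)
     strcspn ("", s)  ->  0   (s still evaluated for side-effects)  */
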
/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
                                              NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (nargs != 2)
        {
          error ("wrong number of arguments to function %<va_start%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
        {
          /* Evidently an out of date version of <stdarg.h>; can't validate
             va_start's second argument, but can still work as intended.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "%<__builtin_next_arg%> called without an argument");
          return true;
        }
      else if (nargs > 1)
        {
          error ("wrong number of arguments to function %<__builtin_next_arg%>");
          return true;
        }
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
         is not quite the same as STRIP_NOPS.  It does more.
         We must also strip off INDIRECT_EXPR for C++ reference
         parameters.  */
      while (CONVERT_EXPR_P (arg)
             || TREE_CODE (arg) == INDIRECT_REF)
        arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
        {
          /* FIXME: Sometimes the tree optimizers hand us something other
             than the last argument even though the user wrote the last
             argument.  We just warn and keep going, so wrong code may
             still be generated.  */
          warning_at (current_location,
                      OPT_Wvarargs,
                      "second parameter of %<va_start%> not last named argument");
        }
      /* Undefined by C99 7.15.1.4p4 (va_start):
         "If the parameter parmN is declared with the register storage
         class, with a function or array type, or with a type that is
         not compatible with the type that results after application of
         the default argument promotions, the behavior is undefined."  */
      else if (DECL_REGISTER (arg))
        {
          warning_at (current_location,
                      OPT_Wvarargs,
                      "undefined behavior when second parameter of "
                      "%<va_start%> is declared with %<register%> storage");
        }

      /* We want to verify the second parameter just once before the tree
         optimizers are run and then avoid keeping it in the tree,
         as otherwise we could warn even for correct code like:
         void foo (int i, ...)
         { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
        CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
        CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }

  return false;
}

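/* Illustrative note (editorial, not from the original source): the
   checks above diagnose e.g.

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   /+ warning: not the last named argument +/
     }

   while a correct va_start (ap, b) is accepted and its second
   argument is then rewritten to 0 so the check runs only once.  */
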
/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %qD must be a pointer, second integer constant",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %qD is not integer constant between 0 and 3",
             exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}

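/* Illustrative note (editorial, not from the original source): any
   __builtin_object_size call that survives to expansion time had an
   unknowable answer, so the constants returned above are exactly the
   documented "unknown" results:

     __builtin_object_size (p, 0) or (p, 1)  ->  (size_t) -1
     __builtin_object_size (p, 2) or (p, 3)  ->  (size_t) 0  */
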
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
                           enum built_in_function fcode)
{
  if (!validate_arglist (exp,
                         POINTER_TYPE,
                         fcode == BUILT_IN_MEMSET_CHK
                         ? INTEGER_TYPE : POINTER_TYPE,
                         INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  tree size = CALL_EXPR_ARG (exp, 3);

  bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
                                /*str=*/NULL_TREE, size);

  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
         an overflow has been detected or when the call couldn't be
         validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
        return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
         mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
        {
        case BUILT_IN_MEMCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
          break;
        case BUILT_IN_MEMPCPY_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
          break;
        case BUILT_IN_MEMMOVE_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
          break;
        case BUILT_IN_MEMSET_CHK:
          fn = builtin_decl_explicit (BUILT_IN_MEMSET);
          break;
        default:
          break;
        }

      if (! fn)
        return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
        return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
        {
          tree expr;

          if (fcode != BUILT_IN_MEMPCPY_CHK)
            {
              /* Evaluate and ignore LEN in case it has side-effects.  */
              expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
              return expand_expr (dest, target, mode, EXPAND_NORMAL);
            }

          expr = fold_build_pointer_plus (dest, len);
          return expand_expr (expr, target, mode, EXPAND_NORMAL);
        }

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
        {
          unsigned int src_align = get_pointer_alignment (src);

          if (src_align == 0)
            return NULL_RTX;

          /* If src is categorized for a readonly section we can use
             normal __memcpy_chk.  */
          if (readonly_data_expr (src))
            {
              tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return NULL_RTX;
              fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
                                          dest, src, len, size);
              gcc_assert (TREE_CODE (fn) == CALL_EXPR);
              CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
              return expand_expr (fn, target, mode, EXPAND_NORMAL);
            }
        }
      return NULL_RTX;
    }
}

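/* Illustrative note (editorial, not from the original source): with a
   length that provably fits, e.g.

     __builtin___memcpy_chk (d, s, 16, 32)

   the code above lowers the call to plain memcpy (d, s, 16); on a
   provable overflow, or when the object size is not a constant,
   NULL_RTX is returned and the caller emits the checking call
   unchanged.  */
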
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
         by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
         of the string to which the source string is being appended so
         just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
                /*maxread=*/NULL_TREE, len, size);
}

/* Emit a warning if free is called with the address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determine it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function and the NARGS arguments of the call are in ARGS.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}
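/* Illustrative sketch, not part of GCC: the lgamma_r semantics the
   folder above reproduces with mpfr_lgamma.  For a negative
   non-integer argument the sign of Gamma alternates between the
   poles, and that sign is what lands in *signp / signgam.  */
#if 0
#include <stdio.h>
#include <mpfr.h>

int
main (void)
{
  mpfr_t x;
  int sg;

  mpfr_init2 (x, 53);
  mpfr_set_d (x, -2.5, MPFR_RNDN);
  /* x = log(|Gamma(-2.5)|); sg = sign of Gamma(-2.5), here -1.  */
  mpfr_lgamma (x, &sg, x, MPFR_RNDN);
  printf ("lgamma(-2.5) = %g, signgam = %d\n",
          mpfr_get_d (x, MPFR_RNDN), sg);
  mpfr_clear (x);
  return 0;
}
#endif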
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}
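/* Illustrative sketch, not part of GCC: calling a two-argument mpc
   function through the same pointer shape do_mpc_arg2 takes.  mpc_pow
   is used as the example callee here; hedged assumption: it is the
   kind of function callers pass in (e.g. when folding cpow).  */
#if 0
#include <stdio.h>
#include <mpc.h>

int
main (void)
{
  int (*func) (mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t) = mpc_pow;
  mpc_t a, b;
  int inexact;

  mpc_init2 (a, 53);
  mpc_init2 (b, 53);
  mpc_set_d_d (a, 0.0, 1.0, MPC_RNDNN);   /* a = i */
  mpc_set_d_d (b, 2.0, 0.0, MPC_RNDNN);   /* b = 2 */
  inexact = func (a, a, b, MPC_RNDNN);    /* a = i^2 = -1 */
  printf ("re = %g, im = %g, inexact = %d\n",
          mpfr_get_d (mpc_realref (a), MPFR_RNDN),
          mpfr_get_d (mpc_imagref (a), MPFR_RNDN),
          inexact);
  mpc_clear (a);
  mpc_clear (b);
  return 0;
}
#endif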
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}
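/* Illustrative, not part of GCC: the kind of source-level call the
   dispatch above ends up folding.  The strlen of a string literal is
   replaced by a constant through the generic folders fold_builtin_n
   reaches, so no library call survives.  */
#if 0
unsigned long
folded_length (void)
{
  return __builtin_strlen ("abc");   /* folds to the constant 3 */
}
#endif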
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
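/* Illustrative, not part of GCC: the user-level construct that leads
   here.  Redeclaring a builtin with an asm label gives the decl a
   user assembler name, so calls GCC emits to the builtin use the new
   symbol ("my_ffs" is a made-up name for the example).  */
#if 0
extern int ffs (int) __asm__ ("my_ffs");

int
lowest_set_bit (int x)
{
  return ffs (x);   /* emitted as a call to my_ffs when not expanded inline */
}
#endif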
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}
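/* Illustrative, not part of GCC: two of the cases above in source
   form.  Neither call survives expansion as an actual call; the first
   becomes a constant, the second a register move or stack load.  */
#if 0
int
simple_cases (void)
{
  int a = __builtin_constant_p (42);            /* expands to 1 */
  void *ra = __builtin_return_address (0);      /* register/stack access */
  return a + (ra != 0);
}
#endif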
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}
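/* Illustrative, not part of GCC: a typical "inexpensive" builtin from
   the list above.  __builtin_popcount expands to a single instruction
   on targets that have one, or to a small libgcc routine otherwise,
   so inlining heuristics may treat the caller as cheap.  */
#if 0
unsigned
bit_count (unsigned x)
{
  return __builtin_popcount (x);
}
#endif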
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
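/* Illustrative, not part of GCC: why the host/target width check
   matters.  Folding a call like the one below needs 'l' as a host
   char so host string routines can do the search; if the target char
   were wider than the host's, the cast would lose bits, and the check
   above refuses to fold.  The folded result shown is an assumption
   about what the string folders produce for this input.  */
#if 0
const char *
find_l (void)
{
  return __builtin_memchr ("hello", 'l', 5);   /* folds to "hello" + 2 */
}
#endif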
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
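/* Illustrative, not part of GCC: on an LP64 target ptrdiff_t is 64
   bits, so the cap returned above is PTRDIFF_MAX.  The example below
   assumes the -Walloc-size-larger-than= warning uses this cap as its
   default limit for allocations the compiler can see.  */
#if 0
#include <stdint.h>
#include <stdlib.h>

void *
too_big (void)
{
  return malloc ((size_t) PTRDIFF_MAX + 1);   /* exceeds maximum object size */
}
#endif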