gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "params.h"
35 #include "tm_p.h"
36 #include "stringpool.h"
37 #include "tree-vrp.h"
38 #include "tree-ssanames.h"
39 #include "expmed.h"
40 #include "optabs.h"
41 #include "emit-rtl.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "alias.h"
45 #include "fold-const.h"
46 #include "fold-const-call.h"
47 #include "gimple-ssa-warn-restrict.h"
48 #include "stor-layout.h"
49 #include "calls.h"
50 #include "varasm.h"
51 #include "tree-object-size.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx, bool);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
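
/* Thus "__builtin_memcpy" and "__atomic_load_n" match, while a plain
   "memcpy" does not.  */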

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
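
/* For instance, for an access to ((char *) p) + 6 where P is known to
   be 8-byte aligned, this computes *ALIGNP == 64 and *BITPOSP == 48
   (both in bits): the access is 6 bytes past an 8-byte boundary.  */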

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
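
/* Continuing the example above, a known bit position of 48 within an
   alignment of 64 collapses to least_bit_hwi (48) == 16, i.e. the
   address itself is known to be exactly 2-byte aligned.  */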

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a pointer, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
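
/* E.g. string_length ("ab\0cd", 1, 5) returns 2, while for ELTSIZE == 4
   the scan compares ELTSIZE bytes at a time against "\0\0\0\0" to find
   the first NUL wide character.  */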

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array.  */
  HOST_WIDE_INT maxelts = TREE_STRING_LENGTH (src);
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      maxelts = tree_to_uhwi (size);

  maxelts = maxelts / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
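
/* For example, given the expression &"foobar"[2], string_constant
   returns the STRING_CST with BYTEOFF 2, so ELTOFF is 2 and the
   result is ssize_int (4).  */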

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
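
/* As an illustration, on a 32-bit little-endian target
   c_readstr ("abcd", SImode) yields the constant 0x64636261, i.e. 'a'
   in the least significant byte.  */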

/* Cast a target constant CST to the target's CHAR type; if that value
   also fits in the host char type, store it in the variable pointed to
   by P and return zero.  Return nonzero otherwise.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
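
/* So an SSA_NAME or a non-addressable PARM_DECL is returned unchanged,
   while anything that might change between evaluations, e.g. *p, is
   wrapped in a SAVE_EXPR as usual.  */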

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
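
/* The buffer laid out above therefore contains, in order: word 0, the
   saved frame pointer; word 1, the address of the receiver label; and
   from word 2 on, the machine-dependent stack save area written by
   emit_stack_save and reloaded by expand_builtin_longjmp below.  */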

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
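
/* Return true if ITER has not yet reached the end of the argument
   list it is iterating over.  */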
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
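
/* For example, expand_builtin_nonlocal_goto below uses
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   to require exactly two pointer arguments, while
   expand_builtin_prefetch uses (exp, POINTER_TYPE, 0) to require one
   pointer followed by any number of further arguments.  */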

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
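
/* So, for instance, __builtin_prefetch (p, 1, 3) requests that *p be
   prefetched for writing with the highest degree of temporal locality,
   while __builtin_prefetch (p) alone is a read prefetch with the same
   default locality.  */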

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
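
/* The resulting block layout is thus: the incoming arg pointer first,
   then (if the target passes it explicitly) the structure value
   address, then each argument register in turn, each placed at the
   next offset suitably aligned for its mode.  apply_result_size below
   computes the analogous size for the value registers saved by
   __builtin_apply.  */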

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1584 /* Perform an untyped call and save the state required to perform an
1585 untyped return of whatever value was returned by the given function. */
1587 static rtx
1588 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1590 int size, align, regno;
1591 fixed_size_mode mode;
1592 rtx incoming_args, result, reg, dest, src;
1593 rtx_call_insn *call_insn;
1594 rtx old_stack_level = 0;
1595 rtx call_fusage = 0;
1596 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1598 arguments = convert_memory_address (Pmode, arguments);
1600 /* Create a block where the return registers can be saved. */
1601 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1603 /* Fetch the arg pointer from the ARGUMENTS block. */
1604 incoming_args = gen_reg_rtx (Pmode);
1605 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1606 if (!STACK_GROWS_DOWNWARD)
1607 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1608 incoming_args, 0, OPTAB_LIB_WIDEN);
1610 /* Push a new argument block and copy the arguments. Do not allow
1611 the (potential) memcpy call below to interfere with our stack
1612 manipulations. */
1613 do_pending_stack_adjust ();
1614 NO_DEFER_POP;
1616 /* Save the stack with nonlocal if available. */
1617 if (targetm.have_save_stack_nonlocal ())
1618 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1619 else
1620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1622 /* Allocate a block of memory onto the stack and copy the memory
1623 arguments to the outgoing arguments address. We can pass TRUE
1624 as the 4th argument because we just saved the stack pointer
1625 and will restore it right after the call. */
1626 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1628 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1629 may have already set current_function_calls_alloca to true.
1630 current_function_calls_alloca won't be set if argsize is zero,
1631 so we have to guarantee need_drap is true here. */
1632 if (SUPPORTS_STACK_ALIGNMENT)
1633 crtl->need_drap = true;
1635 dest = virtual_outgoing_args_rtx;
1636 if (!STACK_GROWS_DOWNWARD)
1638 if (CONST_INT_P (argsize))
1639 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1640 else
1641 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1643 dest = gen_rtx_MEM (BLKmode, dest);
1644 set_mem_align (dest, PARM_BOUNDARY);
1645 src = gen_rtx_MEM (BLKmode, incoming_args);
1646 set_mem_align (src, PARM_BOUNDARY);
1647 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1649 /* Refer to the argument block. */
1650 apply_args_size ();
1651 arguments = gen_rtx_MEM (BLKmode, arguments);
1652 set_mem_align (arguments, PARM_BOUNDARY);
1654 /* Walk past the arg-pointer and structure value address. */
1655 size = GET_MODE_SIZE (Pmode);
1656 if (struct_value)
1657 size += GET_MODE_SIZE (Pmode);
1659 /* Restore each of the registers previously saved. Make USE insns
1660 for each of these registers for use in making the call. */
1661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1662 if ((mode = apply_args_mode[regno]) != VOIDmode)
1664 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1665 if (size % align != 0)
1666 size = CEIL (size, align) * align;
1667 reg = gen_rtx_REG (mode, regno);
1668 emit_move_insn (reg, adjust_address (arguments, mode, size));
1669 use_reg (&call_fusage, reg);
1670 size += GET_MODE_SIZE (mode);
1673 /* Restore the structure value address unless this is passed as an
1674 "invisible" first argument. */
1675 size = GET_MODE_SIZE (Pmode);
1676 if (struct_value)
1678 rtx value = gen_reg_rtx (Pmode);
1679 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1680 emit_move_insn (struct_value, value);
1681 if (REG_P (struct_value))
1682 use_reg (&call_fusage, struct_value);
1683 size += GET_MODE_SIZE (Pmode);
1686 /* All arguments and registers used for the call are set up by now! */
1687 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1689 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1690 needs to be done, and we don't want to load it into a register as an
1691 optimization, because prepare_call_address already did that if needed. */
1692 if (GET_CODE (function) != SYMBOL_REF)
1693 function = memory_address (FUNCTION_MODE, function);
1695 /* Generate the actual call instruction and save the return value. */
1696 if (targetm.have_untyped_call ())
1698 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1699 emit_call_insn (targetm.gen_untyped_call (mem, result,
1700 result_vector (1, result)));
1702 else if (targetm.have_call_value ())
1704 rtx valreg = 0;
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1713 gcc_assert (!valreg); /* have_untyped_call required. */
1715 valreg = gen_rtx_REG (mode, regno);
1718 emit_insn (targetm.gen_call_value (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1724 else
1725 gcc_unreachable ();
1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
1732 /* Restore the stack. */
1733 if (targetm.have_save_stack_nonlocal ())
1734 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1735 else
1736 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1737 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1739 OK_DEFER_POP;
1741 /* Return the address of the result block. */
1742 result = copy_addr_to_reg (XEXP (result, 0));
1743 return convert_memory_address (ptr_mode, result);
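/* Illustrative sketch (not part of this file): the source-level builtins
   expanded here and below let a wrapper forward its incoming arguments
   to another function without knowing their types, e.g.

     double target (int a, double b);

     double wrapper (int a, double b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) target, args, 64);
       __builtin_return (ret);
     }

   where 64 is a caller-chosen upper bound on the argument block size.  */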
1746 /* Perform an untyped return. */
1748 static void
1749 expand_builtin_return (rtx result)
1751 int size, align, regno;
1752 fixed_size_mode mode;
1753 rtx reg;
1754 rtx_insn *call_fusage = 0;
1756 result = convert_memory_address (Pmode, result);
1758 apply_result_size ();
1759 result = gen_rtx_MEM (BLKmode, result);
1761 if (targetm.have_untyped_return ())
1763 rtx vector = result_vector (0, result);
1764 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1765 emit_barrier ();
1766 return;
1769 /* Restore the return value and note that each value is used. */
1770 size = 0;
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1787 /* Put the USE insns before the return. */
1788 emit_insn (call_fusage);
1790 /* Return whatever values were restored by jumping directly to the end
1791 of the function. */
1792 expand_naked_return ();
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1797 static enum type_class
1798 type_to_class (tree type)
1800 switch (TREE_CODE (type))
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1823 /* Expand a call EXP to __builtin_classify_type. */
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
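/* Illustrative examples (values from enum type_class in typeclass.h):

     __builtin_classify_type (0)    evaluates to integer_type_class (1)
     __builtin_classify_type (0.0)  evaluates to real_type_class (8)
     __builtin_classify_type ("")   evaluates to pointer_type_class (5)

   the last because a string literal argument decays to a pointer.  */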
1833 /* This helper macro, meant to be used in mathfn_built_in below, determines
1834 which among a set of builtin math functions is appropriate for a given type
1835 mode. The `F' (float) and `L' (long double) are automatically generated
1836 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1837 types, there are additional types that are considered with 'F32', 'F64',
1838 'F128', etc. suffixes. */
1839 #define CASE_MATHFN(MATHFN) \
1840 CASE_CFN_##MATHFN: \
1841 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1842 fcodel = BUILT_IN_##MATHFN##L ; break;
1843 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1844 types. */
1845 #define CASE_MATHFN_FLOATN(MATHFN) \
1846 CASE_CFN_##MATHFN: \
1847 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1848 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1849 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1850 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1851 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1852 break;
1853 /* Similar to above, but appends _R after any F/L suffix. */
1854 #define CASE_MATHFN_REENT(MATHFN) \
1855 case CFN_BUILT_IN_##MATHFN##_R: \
1856 case CFN_BUILT_IN_##MATHFN##F_R: \
1857 case CFN_BUILT_IN_##MATHFN##L_R: \
1858 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1859 fcodel = BUILT_IN_##MATHFN##L_R ; break;
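/* For reference, CASE_MATHFN (ACOS) expands to

     CASE_CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   i.e. one switch case recording the double, float and long double
   variants of the function.  */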
1861 /* Return a function equivalent to FN but operating on floating-point
1862 values of type TYPE, or END_BUILTINS if no such function exists.
1863 This is purely an operation on function codes; it does not guarantee
1864 that the target actually has an implementation of the function. */
1866 static built_in_function
1867 mathfn_built_in_2 (tree type, combined_fn fn)
1869 tree mtype;
1870 built_in_function fcode, fcodef, fcodel;
1871 built_in_function fcodef16 = END_BUILTINS;
1872 built_in_function fcodef32 = END_BUILTINS;
1873 built_in_function fcodef64 = END_BUILTINS;
1874 built_in_function fcodef128 = END_BUILTINS;
1875 built_in_function fcodef32x = END_BUILTINS;
1876 built_in_function fcodef64x = END_BUILTINS;
1877 built_in_function fcodef128x = END_BUILTINS;
1879 switch (fn)
1881 CASE_MATHFN (ACOS)
1882 CASE_MATHFN (ACOSH)
1883 CASE_MATHFN (ASIN)
1884 CASE_MATHFN (ASINH)
1885 CASE_MATHFN (ATAN)
1886 CASE_MATHFN (ATAN2)
1887 CASE_MATHFN (ATANH)
1888 CASE_MATHFN (CBRT)
1889 CASE_MATHFN_FLOATN (CEIL)
1890 CASE_MATHFN (CEXPI)
1891 CASE_MATHFN_FLOATN (COPYSIGN)
1892 CASE_MATHFN (COS)
1893 CASE_MATHFN (COSH)
1894 CASE_MATHFN (DREM)
1895 CASE_MATHFN (ERF)
1896 CASE_MATHFN (ERFC)
1897 CASE_MATHFN (EXP)
1898 CASE_MATHFN (EXP10)
1899 CASE_MATHFN (EXP2)
1900 CASE_MATHFN (EXPM1)
1901 CASE_MATHFN (FABS)
1902 CASE_MATHFN (FDIM)
1903 CASE_MATHFN_FLOATN (FLOOR)
1904 CASE_MATHFN_FLOATN (FMA)
1905 CASE_MATHFN_FLOATN (FMAX)
1906 CASE_MATHFN_FLOATN (FMIN)
1907 CASE_MATHFN (FMOD)
1908 CASE_MATHFN (FREXP)
1909 CASE_MATHFN (GAMMA)
1910 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1911 CASE_MATHFN (HUGE_VAL)
1912 CASE_MATHFN (HYPOT)
1913 CASE_MATHFN (ILOGB)
1914 CASE_MATHFN (ICEIL)
1915 CASE_MATHFN (IFLOOR)
1916 CASE_MATHFN (INF)
1917 CASE_MATHFN (IRINT)
1918 CASE_MATHFN (IROUND)
1919 CASE_MATHFN (ISINF)
1920 CASE_MATHFN (J0)
1921 CASE_MATHFN (J1)
1922 CASE_MATHFN (JN)
1923 CASE_MATHFN (LCEIL)
1924 CASE_MATHFN (LDEXP)
1925 CASE_MATHFN (LFLOOR)
1926 CASE_MATHFN (LGAMMA)
1927 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1928 CASE_MATHFN (LLCEIL)
1929 CASE_MATHFN (LLFLOOR)
1930 CASE_MATHFN (LLRINT)
1931 CASE_MATHFN (LLROUND)
1932 CASE_MATHFN (LOG)
1933 CASE_MATHFN (LOG10)
1934 CASE_MATHFN (LOG1P)
1935 CASE_MATHFN (LOG2)
1936 CASE_MATHFN (LOGB)
1937 CASE_MATHFN (LRINT)
1938 CASE_MATHFN (LROUND)
1939 CASE_MATHFN (MODF)
1940 CASE_MATHFN (NAN)
1941 CASE_MATHFN (NANS)
1942 CASE_MATHFN_FLOATN (NEARBYINT)
1943 CASE_MATHFN (NEXTAFTER)
1944 CASE_MATHFN (NEXTTOWARD)
1945 CASE_MATHFN (POW)
1946 CASE_MATHFN (POWI)
1947 CASE_MATHFN (POW10)
1948 CASE_MATHFN (REMAINDER)
1949 CASE_MATHFN (REMQUO)
1950 CASE_MATHFN_FLOATN (RINT)
1951 CASE_MATHFN_FLOATN (ROUND)
1952 CASE_MATHFN (SCALB)
1953 CASE_MATHFN (SCALBLN)
1954 CASE_MATHFN (SCALBN)
1955 CASE_MATHFN (SIGNBIT)
1956 CASE_MATHFN (SIGNIFICAND)
1957 CASE_MATHFN (SIN)
1958 CASE_MATHFN (SINCOS)
1959 CASE_MATHFN (SINH)
1960 CASE_MATHFN_FLOATN (SQRT)
1961 CASE_MATHFN (TAN)
1962 CASE_MATHFN (TANH)
1963 CASE_MATHFN (TGAMMA)
1964 CASE_MATHFN_FLOATN (TRUNC)
1965 CASE_MATHFN (Y0)
1966 CASE_MATHFN (Y1)
1967 CASE_MATHFN (YN)
1969 default:
1970 return END_BUILTINS;
1973 mtype = TYPE_MAIN_VARIANT (type);
1974 if (mtype == double_type_node)
1975 return fcode;
1976 else if (mtype == float_type_node)
1977 return fcodef;
1978 else if (mtype == long_double_type_node)
1979 return fcodel;
1980 else if (mtype == float16_type_node)
1981 return fcodef16;
1982 else if (mtype == float32_type_node)
1983 return fcodef32;
1984 else if (mtype == float64_type_node)
1985 return fcodef64;
1986 else if (mtype == float128_type_node)
1987 return fcodef128;
1988 else if (mtype == float32x_type_node)
1989 return fcodef32x;
1990 else if (mtype == float64x_type_node)
1991 return fcodef64x;
1992 else if (mtype == float128x_type_node)
1993 return fcodef128x;
1994 else
1995 return END_BUILTINS;
1998 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1999 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2000 otherwise use the explicit declaration. If we can't do the conversion,
2001 return null. */
2003 static tree
2004 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2006 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2007 if (fcode2 == END_BUILTINS)
2008 return NULL_TREE;
2010 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2011 return NULL_TREE;
2013 return builtin_decl_explicit (fcode2);
2016 /* Like mathfn_built_in_1, but always use the implicit array. */
2018 tree
2019 mathfn_built_in (tree type, combined_fn fn)
2021 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2024 /* Like mathfn_built_in_1, but take a built_in_function and
2025 always use the implicit array. */
2027 tree
2028 mathfn_built_in (tree type, enum built_in_function fn)
2030 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
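/* Illustrative usage (not part of this file):

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the implicit declaration of sqrtf, or NULL_TREE if the float
   variant is not available as an implicit builtin.  */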
2033 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2034 return its code, otherwise return IFN_LAST. Note that this function
2035 only tests whether the function is defined in internals.def, not whether
2036 it is actually available on the target. */
2038 internal_fn
2039 associated_internal_fn (tree fndecl)
2041 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2042 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2043 switch (DECL_FUNCTION_CODE (fndecl))
2045 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2046 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2047 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2048 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2049 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2050 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2051 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2052 #include "internal-fn.def"
2054 CASE_FLT_FN (BUILT_IN_POW10):
2055 return IFN_EXP10;
2057 CASE_FLT_FN (BUILT_IN_DREM):
2058 return IFN_REMAINDER;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2063 return IFN_LDEXP;
2064 return IFN_LAST;
2066 default:
2067 return IFN_LAST;
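/* For example, BUILT_IN_SQRT and BUILT_IN_SQRTF both map to IFN_SQRT
   through the internal-fn.def expansion above, while BUILT_IN_POW10
   maps to IFN_EXP10 since pow10 (x) == exp10 (x).  */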
2071 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2072 on the current target by a call to an internal function, return the
2073 code of that internal function, otherwise return IFN_LAST. The caller
2074 is responsible for ensuring that any side-effects of the built-in
2075 call are dealt with correctly. E.g. if CALL sets errno, the caller
2076 must decide that the errno result isn't needed or make it available
2077 in some other way. */
2079 internal_fn
2080 replacement_internal_fn (gcall *call)
2082 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2084 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2085 if (ifn != IFN_LAST)
2087 tree_pair types = direct_internal_fn_types (ifn, call);
2088 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2089 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2090 return ifn;
2093 return IFN_LAST;
2096 /* Expand a call to the builtin ternary math functions (fma).
2097 Return NULL_RTX if a normal call should be emitted rather than expanding the
2098 function in-line. EXP is the expression that is a call to the builtin
2099 function; if convenient, the result should be placed in TARGET.
2100 SUBTARGET may be used as the target for computing one of EXP's
2101 operands. */
2103 static rtx
2104 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2106 optab builtin_optab;
2107 rtx op0, op1, op2, result;
2108 rtx_insn *insns;
2109 tree fndecl = get_callee_fndecl (exp);
2110 tree arg0, arg1, arg2;
2111 machine_mode mode;
2113 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2114 return NULL_RTX;
2116 arg0 = CALL_EXPR_ARG (exp, 0);
2117 arg1 = CALL_EXPR_ARG (exp, 1);
2118 arg2 = CALL_EXPR_ARG (exp, 2);
2120 switch (DECL_FUNCTION_CODE (fndecl))
2122 CASE_FLT_FN (BUILT_IN_FMA):
2123 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2124 builtin_optab = fma_optab; break;
2125 default:
2126 gcc_unreachable ();
2129 /* Make a suitable register to place result in. */
2130 mode = TYPE_MODE (TREE_TYPE (exp));
2132 /* Before working hard, check whether the instruction is available. */
2133 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2134 return NULL_RTX;
2136 result = gen_reg_rtx (mode);
2138 /* Always stabilize the argument list. */
2139 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2140 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2141 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2143 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2144 op1 = expand_normal (arg1);
2145 op2 = expand_normal (arg2);
2147 start_sequence ();
2149 /* Compute into RESULT.
2150 Set RESULT to wherever the result comes back. */
2151 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2152 result, 0);
2154 /* If we were unable to expand via the builtin, stop the sequence
2155 (without outputting the insns) and emit a call to the library function
2156 with the stabilized argument list. */
2157 if (result == 0)
2159 end_sequence ();
2160 return expand_call (exp, target, target == const0_rtx);
2163 /* Output the entire sequence. */
2164 insns = get_insns ();
2165 end_sequence ();
2166 emit_insn (insns);
2168 return result;
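/* Illustrative sketch (not part of this file): on a target providing an
   fma pattern for the relevant mode,

     double d = __builtin_fma (a, b, c);

   is expanded by the function above into a single fused multiply-add
   insn via fma_optab instead of a library call.  */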
2171 /* Expand a call to the builtin sin and cos math functions.
2172 Return NULL_RTX if a normal call should be emitted rather than expanding the
2173 function in-line. EXP is the expression that is a call to the builtin
2174 function; if convenient, the result should be placed in TARGET.
2175 SUBTARGET may be used as the target for computing one of EXP's
2176 operands. */
2178 static rtx
2179 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2181 optab builtin_optab;
2182 rtx op0;
2183 rtx_insn *insns;
2184 tree fndecl = get_callee_fndecl (exp);
2185 machine_mode mode;
2186 tree arg;
2188 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2189 return NULL_RTX;
2191 arg = CALL_EXPR_ARG (exp, 0);
2193 switch (DECL_FUNCTION_CODE (fndecl))
2195 CASE_FLT_FN (BUILT_IN_SIN):
2196 CASE_FLT_FN (BUILT_IN_COS):
2197 builtin_optab = sincos_optab; break;
2198 default:
2199 gcc_unreachable ();
2202 /* Make a suitable register to place result in. */
2203 mode = TYPE_MODE (TREE_TYPE (exp));
2205 /* Check if the sincos insn is available; otherwise fall back
2206 to the sin or cos insn. */
2207 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2208 switch (DECL_FUNCTION_CODE (fndecl))
2210 CASE_FLT_FN (BUILT_IN_SIN):
2211 builtin_optab = sin_optab; break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 builtin_optab = cos_optab; break;
2214 default:
2215 gcc_unreachable ();
2218 /* Before working hard, check whether the instruction is available. */
2219 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2221 rtx result = gen_reg_rtx (mode);
2223 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2224 need to expand the argument again. This way, we will not perform
2225 side-effects more than once. */
2226 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2228 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2230 start_sequence ();
2232 /* Compute into RESULT.
2233 Set RESULT to wherever the result comes back. */
2234 if (builtin_optab == sincos_optab)
2236 int ok;
2238 switch (DECL_FUNCTION_CODE (fndecl))
2240 CASE_FLT_FN (BUILT_IN_SIN):
2241 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2242 break;
2243 CASE_FLT_FN (BUILT_IN_COS):
2244 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2245 break;
2246 default:
2247 gcc_unreachable ();
2249 gcc_assert (ok);
2251 else
2252 result = expand_unop (mode, builtin_optab, op0, result, 0);
2254 if (result != 0)
2256 /* Output the entire sequence. */
2257 insns = get_insns ();
2258 end_sequence ();
2259 emit_insn (insns);
2260 return result;
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and emit a call to the library function
2265 with the stabilized argument list. */
2266 end_sequence ();
2269 return expand_call (exp, target, target == const0_rtx);
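/* Note on the expansion above: when only the two-output sincos insn is
   available, sin (x) alone is still expanded through it; the unwanted
   cosine output is passed as 0 to expand_twoval_unop, which then uses
   a scratch register for it.  */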
2272 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2273 return an RTL instruction code that implements the functionality.
2274 If that isn't possible or available return CODE_FOR_nothing. */
2276 static enum insn_code
2277 interclass_mathfn_icode (tree arg, tree fndecl)
2279 bool errno_set = false;
2280 optab builtin_optab = unknown_optab;
2281 machine_mode mode;
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_ILOGB):
2286 errno_set = true; builtin_optab = ilogb_optab; break;
2287 CASE_FLT_FN (BUILT_IN_ISINF):
2288 builtin_optab = isinf_optab; break;
2289 case BUILT_IN_ISNORMAL:
2290 case BUILT_IN_ISFINITE:
2291 CASE_FLT_FN (BUILT_IN_FINITE):
2292 case BUILT_IN_FINITED32:
2293 case BUILT_IN_FINITED64:
2294 case BUILT_IN_FINITED128:
2295 case BUILT_IN_ISINFD32:
2296 case BUILT_IN_ISINFD64:
2297 case BUILT_IN_ISINFD128:
2298 /* These builtins have no optabs (yet). */
2299 break;
2300 default:
2301 gcc_unreachable ();
2304 /* There's no easy way to detect the case we need to set EDOM. */
2305 if (flag_errno_math && errno_set)
2306 return CODE_FOR_nothing;
2308 /* Optab mode depends on the mode of the input argument. */
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2311 if (builtin_optab)
2312 return optab_handler (builtin_optab, mode);
2313 return CODE_FOR_nothing;
2316 /* Expand a call to one of the builtin math functions that operate on
2317 a floating point argument and output an integer result (ilogb, isinf,
2318 isnan, etc.).
2319 Return 0 if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET. */
2323 static rtx
2324 expand_builtin_interclass_mathfn (tree exp, rtx target)
2326 enum insn_code icode = CODE_FOR_nothing;
2327 rtx op0;
2328 tree fndecl = get_callee_fndecl (exp);
2329 machine_mode mode;
2330 tree arg;
2332 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2333 return NULL_RTX;
2335 arg = CALL_EXPR_ARG (exp, 0);
2336 icode = interclass_mathfn_icode (arg, fndecl);
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2339 if (icode != CODE_FOR_nothing)
2341 struct expand_operand ops[1];
2342 rtx_insn *last = get_last_insn ();
2343 tree orig_arg = arg;
2345 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2346 need to expand the argument again. This way, we will not perform
2347 side-effects more than once. */
2348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2350 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2352 if (mode != GET_MODE (op0))
2353 op0 = convert_to_mode (mode, op0, 0);
2355 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2356 if (maybe_legitimize_operands (icode, 0, 1, ops)
2357 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2358 return ops[0].value;
2360 delete_insns_since (last);
2361 CALL_EXPR_ARG (exp, 0) = orig_arg;
2364 return NULL_RTX;
2367 /* Expand a call to the builtin sincos math function.
2368 Return NULL_RTX if a normal call should be emitted rather than expanding the
2369 function in-line. EXP is the expression that is a call to the builtin
2370 function. */
2372 static rtx
2373 expand_builtin_sincos (tree exp)
2375 rtx op0, op1, op2, target1, target2;
2376 machine_mode mode;
2377 tree arg, sinp, cosp;
2378 int result;
2379 location_t loc = EXPR_LOCATION (exp);
2380 tree alias_type, alias_off;
2382 if (!validate_arglist (exp, REAL_TYPE,
2383 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2384 return NULL_RTX;
2386 arg = CALL_EXPR_ARG (exp, 0);
2387 sinp = CALL_EXPR_ARG (exp, 1);
2388 cosp = CALL_EXPR_ARG (exp, 2);
2390 /* Make a suitable register to place result in. */
2391 mode = TYPE_MODE (TREE_TYPE (arg));
2393 /* Check if sincos insn is available, otherwise emit the call. */
2394 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2395 return NULL_RTX;
2397 target1 = gen_reg_rtx (mode);
2398 target2 = gen_reg_rtx (mode);
2400 op0 = expand_normal (arg);
2401 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2402 alias_off = build_int_cst (alias_type, 0);
2403 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2404 sinp, alias_off));
2405 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2406 cosp, alias_off));
2408 /* Compute into target1 and target2.
2409 Set TARGET to wherever the result comes back. */
2410 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2411 gcc_assert (result);
2413 /* Move target1 and target2 to the memory locations indicated
2414 by op1 and op2. */
2415 emit_move_insn (op1, target1);
2416 emit_move_insn (op2, target2);
2418 return const0_rtx;
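/* Illustrative sketch (not part of this file): when the sincos optab is
   available,

     double s, c;
     sincos (x, &s, &c);

   is expanded to a single insn computing both values, whose results are
   then stored through the two pointer arguments as above.  */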
2421 /* Expand a call to the internal cexpi builtin to the sincos math function.
2422 EXP is the expression that is a call to the builtin function; if convenient,
2423 the result should be placed in TARGET. */
2425 static rtx
2426 expand_builtin_cexpi (tree exp, rtx target)
2428 tree fndecl = get_callee_fndecl (exp);
2429 tree arg, type;
2430 machine_mode mode;
2431 rtx op0, op1, op2;
2432 location_t loc = EXPR_LOCATION (exp);
2434 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2435 return NULL_RTX;
2437 arg = CALL_EXPR_ARG (exp, 0);
2438 type = TREE_TYPE (arg);
2439 mode = TYPE_MODE (TREE_TYPE (arg));
2441 /* Try expanding via a sincos optab, fall back to emitting a libcall
2442 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2443 is only generated from sincos or cexp, or when either is available. */
2444 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2446 op1 = gen_reg_rtx (mode);
2447 op2 = gen_reg_rtx (mode);
2449 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2451 /* Compute into op1 and op2. */
2452 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2454 else if (targetm.libc_has_function (function_sincos))
2456 tree call, fn = NULL_TREE;
2457 tree top1, top2;
2458 rtx op1a, op2a;
2460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2465 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2466 else
2467 gcc_unreachable ();
2469 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2470 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2471 op1a = copy_addr_to_reg (XEXP (op1, 0));
2472 op2a = copy_addr_to_reg (XEXP (op2, 0));
2473 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2474 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2476 /* Make sure not to fold the sincos call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2479 call, 3, arg, top1, top2));
2481 else
2483 tree call, fn = NULL_TREE, narg;
2484 tree ctype = build_complex_type (type);
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2492 else
2493 gcc_unreachable ();
2495 /* If we don't have a decl for cexp, create one. This is the
2496 friendliest fallback if the user calls __builtin_cexpi
2497 without full target C99 function support. */
2498 if (fn == NULL_TREE)
2500 tree fntype;
2501 const char *name = NULL;
2503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2504 name = "cexpf";
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2506 name = "cexp";
2507 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2508 name = "cexpl";
2510 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2511 fn = build_fn_decl (name, fntype);
2514 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2515 build_real (type, dconst0), arg);
2517 /* Make sure not to fold the cexp call again. */
2518 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2519 return expand_expr (build_call_nary (ctype, call, 1, narg),
2520 target, VOIDmode, EXPAND_NORMAL);
2523 /* Now build the proper return type. */
2524 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2525 make_tree (TREE_TYPE (arg), op2),
2526 make_tree (TREE_TYPE (arg), op1)),
2527 target, VOIDmode, EXPAND_NORMAL);
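/* For reference: cexpi (x) == cexp (I*x) == cos (x) + I*sin (x), which
   is why the three strategies above (sincos optab, sincos libcall and
   cexp libcall) all compute the same value.  */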
2530 /* Conveniently construct a function call expression. FNDECL names the
2531 function to be called, N is the number of arguments, and the "..."
2532 parameters are the argument expressions. Unlike build_call_expr
2533 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2535 static tree
2536 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2538 va_list ap;
2539 tree fntype = TREE_TYPE (fndecl);
2540 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2542 va_start (ap, n);
2543 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2544 va_end (ap);
2545 SET_EXPR_LOCATION (fn, loc);
2546 return fn;
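/* Illustrative usage (mirroring the fallback paths further below):

     call = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 1, arg);

   builds an unfolded CALL_EXPR to FNDECL with the single argument ARG.  */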
2549 /* Expand a call to one of the builtin rounding functions gcc defines
2550 as an extension (lfloor and lceil). As these are gcc extensions we
2551 do not need to worry about setting errno to EDOM.
2552 If expanding via optab fails, lower expression to (int)(floor(x)).
2553 EXP is the expression that is a call to the builtin function;
2554 if convenient, the result should be placed in TARGET. */
2556 static rtx
2557 expand_builtin_int_roundingfn (tree exp, rtx target)
2559 convert_optab builtin_optab;
2560 rtx op0, tmp;
2561 rtx_insn *insns;
2562 tree fndecl = get_callee_fndecl (exp);
2563 enum built_in_function fallback_fn;
2564 tree fallback_fndecl;
2565 machine_mode mode;
2566 tree arg;
2568 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2569 gcc_unreachable ();
2571 arg = CALL_EXPR_ARG (exp, 0);
2573 switch (DECL_FUNCTION_CODE (fndecl))
2575 CASE_FLT_FN (BUILT_IN_ICEIL):
2576 CASE_FLT_FN (BUILT_IN_LCEIL):
2577 CASE_FLT_FN (BUILT_IN_LLCEIL):
2578 builtin_optab = lceil_optab;
2579 fallback_fn = BUILT_IN_CEIL;
2580 break;
2582 CASE_FLT_FN (BUILT_IN_IFLOOR):
2583 CASE_FLT_FN (BUILT_IN_LFLOOR):
2584 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2585 builtin_optab = lfloor_optab;
2586 fallback_fn = BUILT_IN_FLOOR;
2587 break;
2589 default:
2590 gcc_unreachable ();
2593 /* Make a suitable register to place result in. */
2594 mode = TYPE_MODE (TREE_TYPE (exp));
2596 target = gen_reg_rtx (mode);
2598 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2599 need to expand the argument again. This way, we will not perform
2600 side-effects more than once. */
2601 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2603 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2605 start_sequence ();
2607 /* Compute into TARGET. */
2608 if (expand_sfix_optab (target, op0, builtin_optab))
2610 /* Output the entire sequence. */
2611 insns = get_insns ();
2612 end_sequence ();
2613 emit_insn (insns);
2614 return target;
2617 /* If we were unable to expand via the builtin, stop the sequence
2618 (without outputting the insns). */
2619 end_sequence ();
2621 /* Fall back to floating point rounding optab. */
2622 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2624 /* For non-C99 targets we may end up without a fallback fndecl here
2625 if the user called __builtin_lfloor directly. In this case emit
2626 a call to the floor/ceil variants nevertheless. This should result
2627 in the best user experience for targets without full C99 support. */
2628 if (fallback_fndecl == NULL_TREE)
2630 tree fntype;
2631 const char *name = NULL;
2633 switch (DECL_FUNCTION_CODE (fndecl))
2635 case BUILT_IN_ICEIL:
2636 case BUILT_IN_LCEIL:
2637 case BUILT_IN_LLCEIL:
2638 name = "ceil";
2639 break;
2640 case BUILT_IN_ICEILF:
2641 case BUILT_IN_LCEILF:
2642 case BUILT_IN_LLCEILF:
2643 name = "ceilf";
2644 break;
2645 case BUILT_IN_ICEILL:
2646 case BUILT_IN_LCEILL:
2647 case BUILT_IN_LLCEILL:
2648 name = "ceill";
2649 break;
2650 case BUILT_IN_IFLOOR:
2651 case BUILT_IN_LFLOOR:
2652 case BUILT_IN_LLFLOOR:
2653 name = "floor";
2654 break;
2655 case BUILT_IN_IFLOORF:
2656 case BUILT_IN_LFLOORF:
2657 case BUILT_IN_LLFLOORF:
2658 name = "floorf";
2659 break;
2660 case BUILT_IN_IFLOORL:
2661 case BUILT_IN_LFLOORL:
2662 case BUILT_IN_LLFLOORL:
2663 name = "floorl";
2664 break;
2665 default:
2666 gcc_unreachable ();
2669 fntype = build_function_type_list (TREE_TYPE (arg),
2670 TREE_TYPE (arg), NULL_TREE);
2671 fallback_fndecl = build_fn_decl (name, fntype);
2674 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2676 tmp = expand_normal (exp);
2677 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2679 /* Truncate the result of the floating point optab to an integer
2680 via expand_fix (). */
2681 target = gen_reg_rtx (mode);
2682 expand_fix (target, tmp, 0);
2684 return target;
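/* Illustrative sketch (not part of this file): on a target with no
   lfloor insn,

     long l = __builtin_lfloor (x);

   is lowered by the function above to the equivalent of

     long l = (long) floor (x);

   i.e. a call to the floor variant followed by expand_fix.  */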
2687 /* Expand a call to one of the builtin math functions doing integer
2688 conversion (lrint).
2689 Return 0 if a normal call should be emitted rather than expanding the
2690 function in-line. EXP is the expression that is a call to the builtin
2691 function; if convenient, the result should be placed in TARGET. */
2693 static rtx
2694 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2696 convert_optab builtin_optab;
2697 rtx op0;
2698 rtx_insn *insns;
2699 tree fndecl = get_callee_fndecl (exp);
2700 tree arg;
2701 machine_mode mode;
2702 enum built_in_function fallback_fn = BUILT_IN_NONE;
2704 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2705 gcc_unreachable ();
2707 arg = CALL_EXPR_ARG (exp, 0);
2709 switch (DECL_FUNCTION_CODE (fndecl))
2711 CASE_FLT_FN (BUILT_IN_IRINT):
2712 fallback_fn = BUILT_IN_LRINT;
2713 gcc_fallthrough ();
2714 CASE_FLT_FN (BUILT_IN_LRINT):
2715 CASE_FLT_FN (BUILT_IN_LLRINT):
2716 builtin_optab = lrint_optab;
2717 break;
2719 CASE_FLT_FN (BUILT_IN_IROUND):
2720 fallback_fn = BUILT_IN_LROUND;
2721 gcc_fallthrough ();
2722 CASE_FLT_FN (BUILT_IN_LROUND):
2723 CASE_FLT_FN (BUILT_IN_LLROUND):
2724 builtin_optab = lround_optab;
2725 break;
2727 default:
2728 gcc_unreachable ();
2731 /* There's no easy way to detect the case we need to set EDOM. */
2732 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2733 return NULL_RTX;
2735 /* Make a suitable register to place result in. */
2736 mode = TYPE_MODE (TREE_TYPE (exp));
2738 /* There's no easy way to detect the case we need to set EDOM. */
2739 if (!flag_errno_math)
2741 rtx result = gen_reg_rtx (mode);
2743 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2744 need to expand the argument again. This way, we will not perform
2745 side-effects more than once. */
2746 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2748 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2750 start_sequence ();
2752 if (expand_sfix_optab (result, op0, builtin_optab))
2754 /* Output the entire sequence. */
2755 insns = get_insns ();
2756 end_sequence ();
2757 emit_insn (insns);
2758 return result;
2761 /* If we were unable to expand via the builtin, stop the sequence
2762 (without outputting the insns) and call to the library function
2763 with the stabilized argument list. */
2764 end_sequence ();
2767 if (fallback_fn != BUILT_IN_NONE)
2769 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2770 targets, (int) round (x) should never be transformed into
2771 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2772 a call to lround in the hope that the target provides at least some
2773 C99 functions. This should result in the best user experience for
2774 targets without full C99 support. */
2775 tree fallback_fndecl = mathfn_built_in_1
2776 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2778 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2779 fallback_fndecl, 1, arg);
2781 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2782 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2783 return convert_to_mode (mode, target, 0);
2786 return expand_call (exp, target, target == const0_rtx);
2789 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2790 a normal call should be emitted rather than expanding the function
2791 in-line. EXP is the expression that is a call to the builtin
2792 function; if convenient, the result should be placed in TARGET. */
2794 static rtx
2795 expand_builtin_powi (tree exp, rtx target)
2797 tree arg0, arg1;
2798 rtx op0, op1;
2799 machine_mode mode;
2800 machine_mode mode2;
2802 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2803 return NULL_RTX;
2805 arg0 = CALL_EXPR_ARG (exp, 0);
2806 arg1 = CALL_EXPR_ARG (exp, 1);
2807 mode = TYPE_MODE (TREE_TYPE (exp));
2809 /* Emit a libcall to libgcc. */
2811 /* Mode of the 2nd argument must match that of an int. */
2812 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2814 if (target == NULL_RTX)
2815 target = gen_reg_rtx (mode);
2817 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2818 if (GET_MODE (op0) != mode)
2819 op0 = convert_to_mode (mode, op0, 0);
2820 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2821 if (GET_MODE (op1) != mode2)
2822 op1 = convert_to_mode (mode2, op1, 0);
2824 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2825 target, LCT_CONST, mode,
2826 op0, mode, op1, mode2);
2828 return target;
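/* Illustrative sketch (not part of this file): __builtin_powi (x, n)
   raises X to the integer power N.  The expansion above always emits a
   libcall into libgcc, e.g. for double

     double y = __builtin_powi (x, 3);

   becomes a call to __powidf2 (x, 3).  */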
2831 /* Expand expression EXP which is a call to the strlen builtin. Return
2832 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2833 try to get the result in TARGET, if convenient. */
2835 static rtx
2836 expand_builtin_strlen (tree exp, rtx target,
2837 machine_mode target_mode)
2839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2840 return NULL_RTX;
2842 struct expand_operand ops[4];
2843 rtx pat;
2844 tree len;
2845 tree src = CALL_EXPR_ARG (exp, 0);
2846 rtx src_reg;
2847 rtx_insn *before_strlen;
2848 machine_mode insn_mode;
2849 enum insn_code icode = CODE_FOR_nothing;
2850 unsigned int align;
2852 /* If the length can be computed at compile-time, return it. */
2853 len = c_strlen (src, 0);
2854 if (len)
2855 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2857 /* If the length can be computed at compile-time and is a constant
2858 integer, but there are side-effects in src, evaluate
2859 src for side-effects, then return len.
2860 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2861 can be optimized into: i++; x = 3; */
2862 len = c_strlen (src, 1);
2863 if (len && TREE_CODE (len) == INTEGER_CST)
2865 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2866 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2869 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2871 /* If SRC is not a pointer type, don't do this operation inline. */
2872 if (align == 0)
2873 return NULL_RTX;
2875 /* Bail out if we can't compute strlen in the right mode. */
2876 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2878 icode = optab_handler (strlen_optab, insn_mode);
2879 if (icode != CODE_FOR_nothing)
2880 break;
2882 if (insn_mode == VOIDmode)
2883 return NULL_RTX;
2885 /* Make a place to hold the source address. We will not expand
2886 the actual source until we are sure that the expansion will
2887 not fail -- there are trees that cannot be expanded twice. */
2888 src_reg = gen_reg_rtx (Pmode);
2890 /* Mark the beginning of the strlen sequence so we can emit the
2891 source operand later. */
2892 before_strlen = get_last_insn ();
2894 create_output_operand (&ops[0], target, insn_mode);
2895 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2896 create_integer_operand (&ops[2], 0);
2897 create_integer_operand (&ops[3], align);
2898 if (!maybe_expand_insn (icode, 4, ops))
2899 return NULL_RTX;
2901 /* Check to see if the argument was declared attribute nonstring
2902 and if so, issue a warning since at this point it's not known
2903 to be nul-terminated. */
2904 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2906 /* Now that we are assured of success, expand the source. */
2907 start_sequence ();
2908 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2909 if (pat != src_reg)
2911 #ifdef POINTERS_EXTEND_UNSIGNED
2912 if (GET_MODE (pat) != Pmode)
2913 pat = convert_to_mode (Pmode, pat,
2914 POINTERS_EXTEND_UNSIGNED);
2915 #endif
2916 emit_move_insn (src_reg, pat);
2918 pat = get_insns ();
2919 end_sequence ();
2921 if (before_strlen)
2922 emit_insn_after (pat, before_strlen);
2923 else
2924 emit_insn_before (pat, get_insns ());
2926 /* Return the value in the proper mode for this function. */
2927 if (GET_MODE (ops[0].value) == target_mode)
2928 target = ops[0].value;
2929 else if (target != 0)
2930 convert_move (target, ops[0].value, 0);
2931 else
2932 target = convert_to_mode (target_mode, ops[0].value, 0);
2934 return target;
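/* Illustrative examples of the compile-time paths above:

     n = strlen ("abc");                      becomes  n = 3;
     x = strlen (i++ ? "xfoo" + 1 : "bar");   becomes  i++; x = 3;

   Only when neither applies is the strlen optab tried.  */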
2937 /* Expand call EXP to the strnlen built-in, returning the result
2938 in TARGET if convenient, or NULL_RTX on failure. */
2940 static rtx
2941 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2943 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2944 return NULL_RTX;
2946 tree src = CALL_EXPR_ARG (exp, 0);
2947 tree bound = CALL_EXPR_ARG (exp, 1);
2949 if (!bound)
2950 return NULL_RTX;
2952 location_t loc = UNKNOWN_LOCATION;
2953 if (EXPR_HAS_LOCATION (exp))
2954 loc = EXPR_LOCATION (exp);
2956 tree maxobjsize = max_object_size ();
2957 tree func = get_callee_fndecl (exp);
2959 tree len = c_strlen (src, 0);
2961 if (TREE_CODE (bound) == INTEGER_CST)
2963 if (!TREE_NO_WARNING (exp)
2964 && tree_int_cst_lt (maxobjsize, bound)
2965 && warning_at (loc, OPT_Wstringop_overflow_,
2966 "%K%qD specified bound %E "
2967 "exceeds maximum object size %E",
2968 exp, func, bound, maxobjsize))
2969 TREE_NO_WARNING (exp) = true;
2971 if (!len || TREE_CODE (len) != INTEGER_CST)
2972 return NULL_RTX;
2974 len = fold_convert_loc (loc, size_type_node, len);
2975 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2976 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2979 if (TREE_CODE (bound) != SSA_NAME)
2980 return NULL_RTX;
2982 wide_int min, max;
2983 enum value_range_type rng = get_range_info (bound, &min, &max);
2984 if (rng != VR_RANGE)
2985 return NULL_RTX;
2987 if (!TREE_NO_WARNING (exp)
2988 && wi::ltu_p (wi::to_wide (maxobjsize), min)
2989 && warning_at (loc, OPT_Wstringop_overflow_,
2990 "%K%qD specified bound [%wu, %wu] "
2991 "exceeds maximum object size %E",
2992 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
2993 TREE_NO_WARNING (exp) = true;
2995 if (!len || TREE_CODE (len) != INTEGER_CST)
2996 return NULL_RTX;
2998 if (wi::gtu_p (min, wi::to_wide (len)))
2999 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3001 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3002 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
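/* Illustrative examples (not part of this file): with a constant bound,

     strnlen ("hello", 3)

   folds to MIN (5, 3) == 3; with an SSA bound known to lie in [8, 16],
   the same call folds to the string length 5, since the bound provably
   exceeds it.  */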
3005 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3006 bytes from constant string DATA + OFFSET and return it as target
3007 constant. */
3009 static rtx
3010 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3011 scalar_int_mode mode)
3013 const char *str = (const char *) data;
3015 gcc_assert (offset >= 0
3016 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3017 <= strlen (str) + 1));
3019 return c_readstr (str + offset, mode);
3022 /* LEN specifies the length of the block of the memcpy/memset operation.
3023 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3024 In some cases we can make a very likely guess on the max size, which
3025 we then set into PROBABLE_MAX_SIZE. */
3027 static void
3028 determine_block_size (tree len, rtx len_rtx,
3029 unsigned HOST_WIDE_INT *min_size,
3030 unsigned HOST_WIDE_INT *max_size,
3031 unsigned HOST_WIDE_INT *probable_max_size)
3033 if (CONST_INT_P (len_rtx))
3035 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3036 return;
3038 else
3040 wide_int min, max;
3041 enum value_range_type range_type = VR_UNDEFINED;
3043 /* Determine bounds from the type. */
3044 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3045 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3046 else
3047 *min_size = 0;
3048 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3049 *probable_max_size = *max_size
3050 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3051 else
3052 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3054 if (TREE_CODE (len) == SSA_NAME)
3055 range_type = get_range_info (len, &min, &max);
3056 if (range_type == VR_RANGE)
3058 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3059 *min_size = min.to_uhwi ();
3060 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3061 *probable_max_size = *max_size = max.to_uhwi ();
3063 else if (range_type == VR_ANTI_RANGE)
3065 /* An anti range 0...N lets us determine the minimal size to be N+1. */
3066 if (min == 0)
3068 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3069 *min_size = max.to_uhwi () + 1;
3071 /* Code like
3073 int n;
3074 if (n < 100)
3075 memcpy (a, b, n)
3077 produces an anti range allowing negative values of N. We can
3078 still use the information and guess that N is not negative. */
3080 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3081 *probable_max_size = min.to_uhwi () - 1;
3084 gcc_checking_assert (*max_size <=
3085 (unsigned HOST_WIDE_INT)
3086 GET_MODE_MASK (GET_MODE (len_rtx)));
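/* Illustrative sketch (not part of this file; assumes arrays a and b):
   in

     void f (unsigned n) { if (n < 100) memcpy (a, b, n); }

   value range information gives N the range [0, 99], so the function
   above sets *min_size = 0 and *max_size = *probable_max_size = 99.  */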
3089 /* Try to verify that the sizes and lengths of the arguments to a string
3090 manipulation function given by EXP are within valid bounds and that
3091 the operation does not lead to buffer overflow or read past the end.
3092 Arguments other than EXP may be null. When non-null, the arguments
3093 have the following meaning:
3094 DST is the destination of a copy call or NULL otherwise.
3095 SRC is the source of a copy call or NULL otherwise.
3096 DSTWRITE is the number of bytes written into the destination obtained
3097 from the user-supplied size argument to the function (such as in
3098 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3099 MAXREAD is the user-supplied bound on the length of the source sequence
3100 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3101 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3102 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3103 expression EXP is a string function call (as opposed to a memory call
3104 like memcpy). As an exception, SRCSTR can also be an integer denoting
3105 the precomputed size of the source string or object (for functions like
3106 memcpy).
3107 DSTSIZE is the size of the destination object specified by the last
3108 argument to the _chk builtins, typically resulting from the expansion
3109 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3110 DSTSIZE)).
3112 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3113 SIZE_MAX.
3115 If the call is successfully verified as safe return true, otherwise
3116 return false. */
3118 static bool
3119 check_access (tree exp, tree, tree, tree dstwrite,
3120 tree maxread, tree srcstr, tree dstsize)
3122 int opt = OPT_Wstringop_overflow_;
3124 /* The size of the largest object is half the address space, or
3125 PTRDIFF_MAX. (This is way too permissive.) */
3126 tree maxobjsize = max_object_size ();
3128 /* Either the length of the source string for string functions or
3129 the size of the source object for raw memory functions. */
3130 tree slen = NULL_TREE;
3132 tree range[2] = { NULL_TREE, NULL_TREE };
3134 /* Set to true when the exact number of bytes written by a string
3135 function like strcpy is not known and the only thing that is
3136 known is that it must be at least one (for the terminating nul). */
3137 bool at_least_one = false;
3138 if (srcstr)
3140 /* SRCSTR is normally a pointer to a string but as a special case
3141 it can be an integer denoting the length of a string. */
3142 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3144 /* Try to determine the range of lengths the source string
3145 refers to. If it can be determined and is less than
3146 the upper bound given by MAXREAD add one to it for
3147 the terminating nul. Otherwise, set it to one for
3148 the same reason, or to MAXREAD as appropriate. */
3149 get_range_strlen (srcstr, range);
3150 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3152 if (maxread && tree_int_cst_le (maxread, range[0]))
3153 range[0] = range[1] = maxread;
3154 else
3155 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3156 range[0], size_one_node);
3158 if (maxread && tree_int_cst_le (maxread, range[1]))
3159 range[1] = maxread;
3160 else if (!integer_all_onesp (range[1]))
3161 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3162 range[1], size_one_node);
3164 slen = range[0];
3166 else
3168 at_least_one = true;
3169 slen = size_one_node;
3172 else
3173 slen = srcstr;
3176 if (!dstwrite && !maxread)
3178 /* When the only available piece of data is the object size
3179 there is nothing to do. */
3180 if (!slen)
3181 return true;
3183 /* Otherwise, when the length of the source sequence is known
3184 (as with strlen), set DSTWRITE to it. */
3185 if (!range[0])
3186 dstwrite = slen;
3189 if (!dstsize)
3190 dstsize = maxobjsize;
3192 if (dstwrite)
3193 get_size_range (dstwrite, range);
3195 tree func = get_callee_fndecl (exp);
3197 /* First check the number of bytes to be written against the maximum
3198 object size. */
3199 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3201 if (TREE_NO_WARNING (exp))
3202 return false;
3204 location_t loc = tree_nonartificial_location (exp);
3205 loc = expansion_point_location_if_in_system_header (loc);
3207 bool warned;
3208 if (range[0] == range[1])
3209 warned = warning_at (loc, opt,
3210 "%K%qD specified size %E "
3211 "exceeds maximum object size %E",
3212 exp, func, range[0], maxobjsize);
3213 else
3214 warned = warning_at (loc, opt,
3215 "%K%qD specified size between %E and %E "
3216 "exceeds maximum object size %E",
3217 exp, func,
3218 range[0], range[1], maxobjsize);
3219 if (warned)
3220 TREE_NO_WARNING (exp) = true;
3222 return false;
3225 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3226 constant, and in range of unsigned HOST_WIDE_INT. */
3227 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3229 /* Next check the number of bytes to be written against the destination
3230 object size. */
3231 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3233 if (range[0]
3234 && ((tree_fits_uhwi_p (dstsize)
3235 && tree_int_cst_lt (dstsize, range[0]))
3236 || (tree_fits_uhwi_p (dstwrite)
3237 && tree_int_cst_lt (dstwrite, range[0]))))
3239 if (TREE_NO_WARNING (exp))
3240 return false;
3242 location_t loc = tree_nonartificial_location (exp);
3243 loc = expansion_point_location_if_in_system_header (loc);
3245 if (dstwrite == slen && at_least_one)
3247 /* This is a call to strcpy with a destination of 0 size
3248 and a source of unknown length. The call will write
3249 at least one byte past the end of the destination. */
3250 warning_at (loc, opt,
3251 "%K%qD writing %E or more bytes into a region "
3252 "of size %E overflows the destination",
3253 exp, func, range[0], dstsize);
3255 else if (tree_int_cst_equal (range[0], range[1]))
3256 warning_n (loc, opt, tree_to_uhwi (range[0]),
3257 "%K%qD writing %E byte into a region "
3258 "of size %E overflows the destination",
3259 "%K%qD writing %E bytes into a region "
3260 "of size %E overflows the destination",
3261 exp, func, range[0], dstsize);
3262 else if (tree_int_cst_sign_bit (range[1]))
3264 /* Avoid printing the upper bound if it's invalid. */
3265 warning_at (loc, opt,
3266 "%K%qD writing %E or more bytes into a region "
3267 "of size %E overflows the destination",
3268 exp, func, range[0], dstsize);
3270 else
3271 warning_at (loc, opt,
3272 "%K%qD writing between %E and %E bytes into "
3273 "a region of size %E overflows the destination",
3274 exp, func, range[0], range[1],
3275 dstsize);
3277 /* Return error when an overflow has been detected. */
3278 return false;
3282 /* Check the maximum length of the source sequence against the size
3283 of the destination object if known, or against the maximum size
3284 of an object. */
3285 if (maxread)
3287 get_size_range (maxread, range);
3289 /* Use the lower end for MAXREAD from now on. */
3290 if (range[0])
3291 maxread = range[0];
3293 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3295 location_t loc = tree_nonartificial_location (exp);
3296 loc = expansion_point_location_if_in_system_header (loc);
3298 if (tree_int_cst_lt (maxobjsize, range[0]))
3300 if (TREE_NO_WARNING (exp))
3301 return false;
3303 /* Warn about crazy big sizes first since that's more
3304 likely to be meaningful than saying that the bound
3305 is greater than the object size if both are big. */
3306 if (range[0] == range[1])
3307 warning_at (loc, opt,
3308 "%K%qD specified bound %E "
3309 "exceeds maximum object size %E",
3310 exp, func,
3311 range[0], maxobjsize);
3312 else
3313 warning_at (loc, opt,
3314 "%K%qD specified bound between %E and %E "
3315 "exceeds maximum object size %E",
3316 exp, func,
3317 range[0], range[1], maxobjsize);
3319 return false;
3322 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3324 if (TREE_NO_WARNING (exp))
3325 return false;
3327 if (tree_int_cst_equal (range[0], range[1]))
3328 warning_at (loc, opt,
3329 "%K%qD specified bound %E "
3330 "exceeds destination size %E",
3331 exp, func,
3332 range[0], dstsize);
3333 else
3334 warning_at (loc, opt,
3335 "%K%qD specified bound between %E and %E "
3336 "exceeds destination size %E",
3337 exp, func,
3338 range[0], range[1], dstsize);
3339 return false;
3344 /* Check for reading past the end of SRC. */
3345 if (slen
3346 && slen == srcstr
3347 && dstwrite && range[0]
3348 && tree_int_cst_lt (slen, range[0]))
3350 if (TREE_NO_WARNING (exp))
3351 return false;
3353 location_t loc = tree_nonartificial_location (exp);
3355 if (tree_int_cst_equal (range[0], range[1]))
3356 warning_n (loc, opt, tree_to_uhwi (range[0]),
3357 "%K%qD reading %E byte from a region of size %E",
3358 "%K%qD reading %E bytes from a region of size %E",
3359 exp, func, range[0], slen);
3360 else if (tree_int_cst_sign_bit (range[1]))
3362 /* Avoid printing the upper bound if it's invalid. */
3363 warning_at (loc, opt,
3364 "%K%qD reading %E or more bytes from a region "
3365 "of size %E",
3366 exp, func, range[0], slen);
3368 else
3369 warning_at (loc, opt,
3370 "%K%qD reading between %E and %E bytes from a region "
3371 "of size %E",
3372 exp, func, range[0], range[1], slen);
3373 return false;
3376 return true;
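/* Illustrative example (not part of this file) of a diagnostic issued
   through the checks above:

     char d[3];
     strcpy (d, "abcd");

   writes 5 bytes (including the terminating nul) into a 3-byte object
   and triggers a -Wstringop-overflow warning of the "writing N bytes
   into a region of size M overflows the destination" form.  */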
3379 /* Helper to compute the size of the object referenced by the DEST
3380 expression which must have pointer type, using Object Size type
3381 OSTYPE (only the least significant 2 bits are used). Return
3382 an estimate of the size of the object if successful or NULL when
3383 the size cannot be determined. When the referenced object involves
3384 a non-constant offset in some range the returned value represents
3385 the largest size given the smallest non-negative offset in the
3386 range. The function is intended for diagnostics and should not
3387 be used to influence code generation or optimization. */
3389 tree
3390 compute_objsize (tree dest, int ostype)
3392 unsigned HOST_WIDE_INT size;
3394 /* Only the two least significant bits are meaningful. */
3395 ostype &= 3;
3397 if (compute_builtin_object_size (dest, ostype, &size))
3398 return build_int_cst (sizetype, size);
3400 if (TREE_CODE (dest) == SSA_NAME)
3402 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3403 if (!is_gimple_assign (stmt))
3404 return NULL_TREE;
3406 dest = gimple_assign_rhs1 (stmt);
3408 tree_code code = gimple_assign_rhs_code (stmt);
3409 if (code == POINTER_PLUS_EXPR)
3411 /* compute_builtin_object_size fails for addresses with
3412 non-constant offsets. Try to determine the range of
3413 such an offset here and use it to adjust the constant
3414 size. */
3415 tree off = gimple_assign_rhs2 (stmt);
3416 if (TREE_CODE (off) == INTEGER_CST)
3418 if (tree size = compute_objsize (dest, ostype))
3420 wide_int wioff = wi::to_wide (off);
3421 wide_int wisiz = wi::to_wide (size);
3423 /* Ignore negative offsets for now. For others,
3424 use the lower bound as the most optimistic
3425 estimate of the (remaining) size. */
3426 if (wi::sign_mask (wioff))
3428 else if (wi::ltu_p (wioff, wisiz))
3429 return wide_int_to_tree (TREE_TYPE (size),
3430 wi::sub (wisiz, wioff));
3431 else
3432 return size_zero_node;
3435 else if (TREE_CODE (off) == SSA_NAME
3436 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3438 wide_int min, max;
3439 enum value_range_type rng = get_range_info (off, &min, &max);
3441 if (rng == VR_RANGE)
3443 if (tree size = compute_objsize (dest, ostype))
3445 wide_int wisiz = wi::to_wide (size);
3447 /* Ignore negative offsets for now. For others,
3448 use the lower bound as the most optimistic
3449 estimate of the (remaining) size. */
3450 if (wi::sign_mask (min))
3452 else if (wi::ltu_p (min, wisiz))
3453 return wide_int_to_tree (TREE_TYPE (size),
3454 wi::sub (wisiz, min));
3455 else
3456 return size_zero_node;
3461 else if (code != ADDR_EXPR)
3462 return NULL_TREE;
3465 /* Unless computing the largest size (for memcpy and other raw memory
3466 functions), try to determine the size of the object from its type. */
3467 if (!ostype)
3468 return NULL_TREE;
3470 if (TREE_CODE (dest) != ADDR_EXPR)
3471 return NULL_TREE;
3473 tree type = TREE_TYPE (dest);
3474 if (TREE_CODE (type) == POINTER_TYPE)
3475 type = TREE_TYPE (type);
3477 type = TYPE_MAIN_VARIANT (type);
3479 if (TREE_CODE (type) == ARRAY_TYPE
3480 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3482 /* Return the constant size unless it's zero (that's a zero-length
3483 array likely at the end of a struct). */
3484 tree size = TYPE_SIZE_UNIT (type);
3485 if (size && TREE_CODE (size) == INTEGER_CST
3486 && !integer_zerop (size))
3487 return size;
3490 return NULL_TREE;
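/* Illustrative sketch (not part of GCC): at the source level, the
   POINTER_PLUS_EXPR adjustment above matches how __builtin_object_size
   accounts for a constant offset into a known object. Compile the
   guarded block separately, with optimization enabled, to experiment. */
#if 0
#include <assert.h>

int
main (void)
{
  char buf[8];
  char *p = buf + 3;
  assert (__builtin_object_size (buf, 0) == 8);
  assert (__builtin_object_size (p, 0) == 5);   /* 8 - 3 remaining bytes */
  return 0;
}
#endif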
3493 /* Helper to determine and check the sizes of the source and the destination
3494 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3495 call expression, DEST is the destination argument, SRC is the source
3496 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3497 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3498 (no overflow or invalid sizes), false otherwise. */
3500 static bool
3501 check_memop_access (tree exp, tree dest, tree src, tree size)
3503 /* For functions like memset and memcpy that operate on raw memory
3504 try to determine the size of the largest source and destination
3505 object using type-0 Object Size regardless of the object size
3506 type specified by the option. */
3507 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3508 tree dstsize = compute_objsize (dest, 0);
3510 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3511 srcsize, dstsize);
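/* Illustrative sketch (not part of GCC): the kind of call the helper
   above is meant to diagnose. With -Wstringop-overflow enabled, GCC
   is expected to warn that 16 bytes are written into an 8-byte object. */
#if 0
#include <string.h>

char dst[8];

void
overflowing_copy (const char *src)
{
  memcpy (dst, src, 16);   /* 16 bytes into a region of size 8 */
}
#endif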
3514 /* Validate memchr arguments without performing any expansion.
3515 Return NULL_RTX. */
3517 static rtx
3518 expand_builtin_memchr (tree exp, rtx)
3520 if (!validate_arglist (exp,
3521 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3522 return NULL_RTX;
3524 tree arg1 = CALL_EXPR_ARG (exp, 0);
3525 tree len = CALL_EXPR_ARG (exp, 2);
3527 /* Diagnose calls where the specified length exceeds the size
3528 of the object. */
3529 if (warn_stringop_overflow)
3531 tree size = compute_objsize (arg1, 0);
3532 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3533 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3536 return NULL_RTX;
3539 /* Expand a call EXP to the memcpy builtin.
3540 Return NULL_RTX if we failed; the caller should emit a normal call,
3541 otherwise try to get the result in TARGET, if convenient (and in
3542 mode MODE if that's convenient). */
3544 static rtx
3545 expand_builtin_memcpy (tree exp, rtx target)
3547 if (!validate_arglist (exp,
3548 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3549 return NULL_RTX;
3551 tree dest = CALL_EXPR_ARG (exp, 0);
3552 tree src = CALL_EXPR_ARG (exp, 1);
3553 tree len = CALL_EXPR_ARG (exp, 2);
3555 check_memop_access (exp, dest, src, len);
3557 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3558 /*endp=*/ 0);
3561 /* Check a call EXP to the memmove built-in for validity.
3562 Return NULL_RTX on both success and failure. */
3564 static rtx
3565 expand_builtin_memmove (tree exp, rtx)
3567 if (!validate_arglist (exp,
3568 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3569 return NULL_RTX;
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 1);
3573 tree len = CALL_EXPR_ARG (exp, 2);
3575 check_memop_access (exp, dest, src, len);
3577 return NULL_RTX;
3580 /* Expand a call EXP to the mempcpy builtin.
3581 Return NULL_RTX if we failed; the caller should emit a normal call,
3582 otherwise try to get the result in TARGET, if convenient. The
3583 expansion uses ENDP == 1, i.e. it returns the end pointer,
3584 one past the last byte written, ala mempcpy. */
3588 static rtx
3589 expand_builtin_mempcpy (tree exp, rtx target)
3591 if (!validate_arglist (exp,
3592 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3593 return NULL_RTX;
3595 tree dest = CALL_EXPR_ARG (exp, 0);
3596 tree src = CALL_EXPR_ARG (exp, 1);
3597 tree len = CALL_EXPR_ARG (exp, 2);
3599 /* Policy does not generally allow using compute_objsize (which
3600 is used internally by check_memop_access) to change code generation
3601 or drive optimization decisions.
3603 In this instance it is safe because the code we generate has
3604 the same semantics regardless of the return value of
3605 check_memop_access. Exactly the same amount of data is copied
3606 and the return value is exactly the same in both cases.
3608 Furthermore, check_memop_access always uses mode 0 for the call to
3609 compute_objsize, so the imprecise nature of compute_objsize is
3610 avoided. */
3612 /* Avoid expanding mempcpy into memcpy when the call is determined
3613 to overflow the buffer. This also prevents the same overflow
3614 from being diagnosed again when expanding memcpy. */
3615 if (!check_memop_access (exp, dest, src, len))
3616 return NULL_RTX;
3618 return expand_builtin_mempcpy_args (dest, src, len,
3619 target, exp, /*endp=*/ 1);
3622 /* Helper function to do the actual work for expansion of the memory
3623 copy family of functions (memcpy, mempcpy, stpcpy). The expansion
3624 should copy LEN bytes of memory from SRC to DEST and assign the
3625 result to TARGET if convenient. If ENDP is 0 return the
3626 destination pointer, if ENDP is 1 return the end pointer ala
3627 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3628 stpcpy. */
3630 static rtx
3631 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3632 rtx target, tree exp, int endp)
3634 const char *src_str;
3635 unsigned int src_align = get_pointer_alignment (src);
3636 unsigned int dest_align = get_pointer_alignment (dest);
3637 rtx dest_mem, src_mem, dest_addr, len_rtx;
3638 HOST_WIDE_INT expected_size = -1;
3639 unsigned int expected_align = 0;
3640 unsigned HOST_WIDE_INT min_size;
3641 unsigned HOST_WIDE_INT max_size;
3642 unsigned HOST_WIDE_INT probable_max_size;
3644 /* If DEST is not a pointer type, call the normal function. */
3645 if (dest_align == 0)
3646 return NULL_RTX;
3648 /* If SRC is not a pointer type, don't do this
3649 operation in-line. */
3650 if (src_align == 0)
3651 return NULL_RTX;
3653 if (currently_expanding_gimple_stmt)
3654 stringop_block_profile (currently_expanding_gimple_stmt,
3655 &expected_align, &expected_size);
3657 if (expected_align < dest_align)
3658 expected_align = dest_align;
3659 dest_mem = get_memory_rtx (dest, len);
3660 set_mem_align (dest_mem, dest_align);
3661 len_rtx = expand_normal (len);
3662 determine_block_size (len, len_rtx, &min_size, &max_size,
3663 &probable_max_size);
3664 src_str = c_getstr (src);
3666 /* If SRC is a string constant and block move would be done
3667 by pieces, we can avoid loading the string from memory
3668 and only store the computed constants. */
3669 if (src_str
3670 && CONST_INT_P (len_rtx)
3671 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3672 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3673 CONST_CAST (char *, src_str),
3674 dest_align, false))
3676 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3677 builtin_memcpy_read_str,
3678 CONST_CAST (char *, src_str),
3679 dest_align, false, endp);
3680 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3681 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3682 return dest_mem;
3685 src_mem = get_memory_rtx (src, len);
3686 set_mem_align (src_mem, src_align);
3688 /* Copy word part most expediently. */
3689 enum block_op_methods method = BLOCK_OP_NORMAL;
3690 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3691 method = BLOCK_OP_TAILCALL;
3692 if (endp == 1 && target != const0_rtx)
3693 method = BLOCK_OP_NO_LIBCALL_RET;
3694 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3695 expected_align, expected_size,
3696 min_size, max_size, probable_max_size);
3697 if (dest_addr == pc_rtx)
3698 return NULL_RTX;
3700 if (dest_addr == 0)
3702 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3703 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3706 if (endp && target != const0_rtx)
3708 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3709 /* stpcpy pointer to last byte. */
3710 if (endp == 2)
3711 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3714 return dest_addr;
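/* Illustrative sketch (not part of GCC): the three ENDP conventions,
   expressed at the source level. mempcpy and stpcpy are GNU/POSIX
   extensions declared by <string.h> with _GNU_SOURCE. */
#if 0
#define _GNU_SOURCE
#include <string.h>

void
endp_demo (char *d, const char *s, size_t n)
{
  char *r0 = memcpy (d, s, n);            /* ENDP == 0: returns D */
  char *r1 = (char *) mempcpy (d, s, n);  /* ENDP == 1: returns D + N */
  char *r2 = stpcpy (d, s);               /* ENDP == 2: the end pointer
                                             minus one, i.e. the address
                                             of the terminating NUL */
  (void) r0; (void) r1; (void) r2;
}
#endif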
3717 static rtx
3718 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3719 rtx target, tree orig_exp, int endp)
3721 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3722 endp);
3725 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3726 we failed; the caller should emit a normal call, otherwise try to
3727 get the result in TARGET, if convenient. If ENDP is 0 return the
3728 destination pointer, if ENDP is 1 return the end pointer ala
3729 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3730 stpcpy. */
3732 static rtx
3733 expand_movstr (tree dest, tree src, rtx target, int endp)
3735 struct expand_operand ops[3];
3736 rtx dest_mem;
3737 rtx src_mem;
3739 if (!targetm.have_movstr ())
3740 return NULL_RTX;
3742 dest_mem = get_memory_rtx (dest, NULL);
3743 src_mem = get_memory_rtx (src, NULL);
3744 if (!endp)
3746 target = force_reg (Pmode, XEXP (dest_mem, 0));
3747 dest_mem = replace_equiv_address (dest_mem, target);
3750 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3751 create_fixed_operand (&ops[1], dest_mem);
3752 create_fixed_operand (&ops[2], src_mem);
3753 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3754 return NULL_RTX;
3756 if (endp && target != const0_rtx)
3758 target = ops[0].value;
3759 /* movstr is supposed to set end to the address of the NUL
3760 terminator. If the caller requested a mempcpy-like return value,
3761 adjust it. */
3762 if (endp == 1)
3764 rtx tem = plus_constant (GET_MODE (target),
3765 gen_lowpart (GET_MODE (target), target), 1);
3766 emit_move_insn (target, force_operand (tem, NULL_RTX));
3769 return target;
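/* Illustrative sketch (not part of GCC): why the plus_constant
   adjustment above is needed. A movstr-style expansion yields the
   address of the NUL terminator (the stpcpy convention); a
   mempcpy-style caller wants the address one past it. */
#if 0
#define _GNU_SOURCE
#include <string.h>

void
return_value_demo (char *d, const char *s)
{
  char *nul = stpcpy (d, s);   /* points at the terminating NUL */
  char *end = nul + 1;         /* one past the NUL: the ENDP == 1 value */
  (void) end;
}
#endif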
3772 /* Do some very basic size validation of a call to the strcat builtin
3773 given by EXP. Return NULL_RTX to have the built-in expand to a call
3774 to the library function. */
3776 static rtx
3777 expand_builtin_strcat (tree exp, rtx)
3779 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3780 || !warn_stringop_overflow)
3781 return NULL_RTX;
3783 tree dest = CALL_EXPR_ARG (exp, 0);
3784 tree src = CALL_EXPR_ARG (exp, 1);
3786 /* There is no way here to determine the length of the string in
3787 the destination to which the SRC string is being appended so
3788 just diagnose cases when the source string is longer than
3789 the destination object. */
3791 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3793 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3794 destsize);
3796 return NULL_RTX;
3799 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3800 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3801 try to get the result in TARGET, if convenient (and in mode MODE if that's
3802 convenient). */
3804 static rtx
3805 expand_builtin_strcpy (tree exp, rtx target)
3807 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3808 return NULL_RTX;
3810 tree dest = CALL_EXPR_ARG (exp, 0);
3811 tree src = CALL_EXPR_ARG (exp, 1);
3813 if (warn_stringop_overflow)
3815 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3816 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3817 src, destsize);
3820 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3822 /* Check to see if the argument was declared attribute nonstring
3823 and if so, issue a warning since at this point it's not known
3824 to be nul-terminated. */
3825 tree fndecl = get_callee_fndecl (exp);
3826 maybe_warn_nonstring_arg (fndecl, exp);
3827 return ret;
3830 return NULL_RTX;
3833 /* Helper function to do the actual work for expand_builtin_strcpy. The
3834 arguments to the builtin_strcpy call DEST and SRC are broken out
3835 so that this can also be called without constructing an actual CALL_EXPR.
3836 The other arguments and return value are the same as for
3837 expand_builtin_strcpy. */
3839 static rtx
3840 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3842 return expand_movstr (dest, src, target, /*endp=*/0);
3845 /* Expand a call EXP to the stpcpy builtin.
3846 Return NULL_RTX if we failed; the caller should emit a normal call,
3847 otherwise try to get the result in TARGET, if convenient (and in
3848 mode MODE if that's convenient). */
3850 static rtx
3851 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3853 tree dst, src;
3854 location_t loc = EXPR_LOCATION (exp);
3856 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3857 return NULL_RTX;
3859 dst = CALL_EXPR_ARG (exp, 0);
3860 src = CALL_EXPR_ARG (exp, 1);
3862 if (warn_stringop_overflow)
3864 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3865 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3866 src, destsize);
3869 /* If the return value is ignored, transform stpcpy into strcpy. */
3870 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3872 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3873 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3874 return expand_expr (result, target, mode, EXPAND_NORMAL);
3876 else
3878 tree len, lenp1;
3879 rtx ret;
3881 /* Ensure we get an actual string whose length can be evaluated at
3882 compile-time, not an expression containing a string. This is
3883 because the latter will potentially produce pessimized code
3884 when used to produce the return value. */
3885 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3886 return expand_movstr (dst, src, target, /*endp=*/2);
3888 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3889 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3890 target, exp, /*endp=*/2);
3892 if (ret)
3893 return ret;
3895 if (TREE_CODE (len) == INTEGER_CST)
3897 rtx len_rtx = expand_normal (len);
3899 if (CONST_INT_P (len_rtx))
3901 ret = expand_builtin_strcpy_args (dst, src, target);
3903 if (ret)
3905 if (! target)
3907 if (mode != VOIDmode)
3908 target = gen_reg_rtx (mode);
3909 else
3910 target = gen_reg_rtx (GET_MODE (ret));
3912 if (GET_MODE (target) != GET_MODE (ret))
3913 ret = gen_lowpart (GET_MODE (target), ret);
3915 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3916 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3917 gcc_assert (ret);
3919 return target;
3924 return expand_movstr (dst, src, target, /*endp=*/2);
3928 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3929 arguments while being careful to avoid duplicate warnings (which could
3930 be issued if the expander were to expand the call, resulting in it
3931 being emitted in expand_call()). */
3933 static rtx
3934 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3936 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3938 /* The call has been successfully expanded. Check for nonstring
3939 arguments and issue warnings as appropriate. */
3940 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3941 return ret;
3944 return NULL_RTX;
3947 /* Check a call EXP to the stpncpy built-in for validity.
3948 Return NULL_RTX on both success and failure. */
3950 static rtx
3951 expand_builtin_stpncpy (tree exp, rtx)
3953 if (!validate_arglist (exp,
3954 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3955 || !warn_stringop_overflow)
3956 return NULL_RTX;
3958 /* The source and destination of the call. */
3959 tree dest = CALL_EXPR_ARG (exp, 0);
3960 tree src = CALL_EXPR_ARG (exp, 1);
3962 /* The exact number of bytes to write (not the maximum). */
3963 tree len = CALL_EXPR_ARG (exp, 2);
3965 /* The size of the destination object. */
3966 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3968 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3970 return NULL_RTX;
3973 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3974 bytes from constant string DATA + OFFSET and return it as target
3975 constant. */
3977 rtx
3978 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3979 scalar_int_mode mode)
3981 const char *str = (const char *) data;
3983 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3984 return const0_rtx;
3986 return c_readstr (str + offset, mode);
3989 /* Helper to check the sizes of sequences and the destination of calls
3990 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3991 success (no overflow or invalid sizes), false otherwise. */
3993 static bool
3994 check_strncat_sizes (tree exp, tree objsize)
3996 tree dest = CALL_EXPR_ARG (exp, 0);
3997 tree src = CALL_EXPR_ARG (exp, 1);
3998 tree maxread = CALL_EXPR_ARG (exp, 2);
4000 /* Try to determine the range of lengths that the source expression
4001 refers to. */
4002 tree lenrange[2];
4003 get_range_strlen (src, lenrange);
4005 /* Try to verify that the destination is big enough for the shortest
4006 string. */
4008 if (!objsize && warn_stringop_overflow)
4010 /* If it hasn't been provided by __strncat_chk, try to determine
4011 the size of the destination object into which the source is
4012 being copied. */
4013 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4016 /* Add one for the terminating nul. */
4017 tree srclen = (lenrange[0]
4018 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4019 size_one_node)
4020 : NULL_TREE);
4022 /* The strncat function copies at most MAXREAD bytes and always appends
4023 the terminating nul so the specified upper bound should never be equal
4024 to (or greater than) the size of the destination. */
4025 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4026 && tree_int_cst_equal (objsize, maxread))
4028 location_t loc = tree_nonartificial_location (exp);
4029 loc = expansion_point_location_if_in_system_header (loc);
4031 warning_at (loc, OPT_Wstringop_overflow_,
4032 "%K%qD specified bound %E equals destination size",
4033 exp, get_callee_fndecl (exp), maxread);
4035 return false;
4038 if (!srclen
4039 || (maxread && tree_fits_uhwi_p (maxread)
4040 && tree_fits_uhwi_p (srclen)
4041 && tree_int_cst_lt (maxread, srclen)))
4042 srclen = maxread;
4044 /* The number of bytes to write is LEN but check_access will also
4045 check SRCLEN if LEN's value isn't known. */
4046 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4047 objsize);
4050 /* Similar to expand_builtin_strcat, do some very basic size validation
4051 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4052 the built-in expand to a call to the library function. */
4054 static rtx
4055 expand_builtin_strncat (tree exp, rtx)
4057 if (!validate_arglist (exp,
4058 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4059 || !warn_stringop_overflow)
4060 return NULL_RTX;
4062 tree dest = CALL_EXPR_ARG (exp, 0);
4063 tree src = CALL_EXPR_ARG (exp, 1);
4064 /* The upper bound on the number of bytes to write. */
4065 tree maxread = CALL_EXPR_ARG (exp, 2);
4066 /* The length of the source sequence. */
4067 tree slen = c_strlen (src, 1);
4069 /* Try to determine the range of lengths that the source expression
4070 refers to. */
4071 tree lenrange[2];
4072 if (slen)
4073 lenrange[0] = lenrange[1] = slen;
4074 else
4075 get_range_strlen (src, lenrange);
4077 /* Try to verify that the destination is big enough for the shortest
4078 string. First try to determine the size of the destination object
4079 into which the source is being copied. */
4080 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4082 /* Add one for the terminating nul. */
4083 tree srclen = (lenrange[0]
4084 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4085 size_one_node)
4086 : NULL_TREE);
4088 /* The strncat function copies at most MAXREAD bytes and always appends
4089 the terminating nul so the specified upper bound should never be equal
4090 to (or greater than) the size of the destination. */
4091 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4092 && tree_int_cst_equal (destsize, maxread))
4094 location_t loc = tree_nonartificial_location (exp);
4095 loc = expansion_point_location_if_in_system_header (loc);
4097 warning_at (loc, OPT_Wstringop_overflow_,
4098 "%K%qD specified bound %E equals destination size",
4099 exp, get_callee_fndecl (exp), maxread);
4101 return NULL_RTX;
4104 if (!srclen
4105 || (maxread && tree_fits_uhwi_p (maxread)
4106 && tree_fits_uhwi_p (srclen)
4107 && tree_int_cst_lt (maxread, srclen)))
4108 srclen = maxread;
4110 /* The number of bytes to write is SRCLEN. */
4111 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4113 return NULL_RTX;
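/* Illustrative sketch (not part of GCC): the bound check above flags
   a common off-by-one misuse of strncat, whose bound must leave room
   for the NUL that strncat always appends. */
#if 0
#include <string.h>

char d[8];

void
append_demo (const char *s)
{
  strncat (d, s, sizeof d);                    /* diagnosed: bound equals
                                                  destination size */
  strncat (d, s, sizeof d - strlen (d) - 1);   /* the correct idiom */
}
#endif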
4116 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4117 NULL_RTX if we failed; the caller should emit a normal call. */
4119 static rtx
4120 expand_builtin_strncpy (tree exp, rtx target)
4122 location_t loc = EXPR_LOCATION (exp);
4124 if (validate_arglist (exp,
4125 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4127 tree dest = CALL_EXPR_ARG (exp, 0);
4128 tree src = CALL_EXPR_ARG (exp, 1);
4129 /* The number of bytes to write (not the maximum). */
4130 tree len = CALL_EXPR_ARG (exp, 2);
4131 /* The length of the source sequence. */
4132 tree slen = c_strlen (src, 1);
4134 if (warn_stringop_overflow)
4136 tree destsize = compute_objsize (dest,
4137 warn_stringop_overflow - 1);
4139 /* The number of bytes to write is LEN but check_access will also
4140 check SLEN if LEN's value isn't known. */
4141 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4142 destsize);
4145 /* Both LEN and the length of SRC must be known constants. */
4146 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4147 return NULL_RTX;
4149 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4151 /* We're required to pad with trailing zeros if the requested
4152 len is greater than strlen(s2)+1. In that case try to
4153 use store_by_pieces; if it fails, punt. */
4154 if (tree_int_cst_lt (slen, len))
4156 unsigned int dest_align = get_pointer_alignment (dest);
4157 const char *p = c_getstr (src);
4158 rtx dest_mem;
4160 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4161 || !can_store_by_pieces (tree_to_uhwi (len),
4162 builtin_strncpy_read_str,
4163 CONST_CAST (char *, p),
4164 dest_align, false))
4165 return NULL_RTX;
4167 dest_mem = get_memory_rtx (dest, len);
4168 store_by_pieces (dest_mem, tree_to_uhwi (len),
4169 builtin_strncpy_read_str,
4170 CONST_CAST (char *, p), dest_align, false, 0);
4171 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4172 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4173 return dest_mem;
4176 return NULL_RTX;
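/* Illustrative sketch (not part of GCC): the padding rule handled by
   the store_by_pieces path above. When LEN exceeds strlen (SRC) + 1,
   strncpy must fill the remainder of the destination with NULs. */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  char buf[8];
  strncpy (buf, "ab", sizeof buf);
  for (int i = 2; i < 8; i++)
    assert (buf[i] == '\0');   /* all trailing bytes zeroed, not just one */
  return 0;
}
#endif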
4179 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4180 bytes from constant string DATA + OFFSET and return it as target
4181 constant. */
4183 rtx
4184 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4185 scalar_int_mode mode)
4187 const char *c = (const char *) data;
4188 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4190 memset (p, *c, GET_MODE_SIZE (mode));
4192 return c_readstr (p, mode);
4195 /* Callback routine for store_by_pieces. Return the RTL of a register
4196 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4197 char value given in the RTL register data. For example, if mode is
4198 4 bytes wide, return the RTL for 0x01010101*data. */
4200 static rtx
4201 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4202 scalar_int_mode mode)
4204 rtx target, coeff;
4205 size_t size;
4206 char *p;
4208 size = GET_MODE_SIZE (mode);
4209 if (size == 1)
4210 return (rtx) data;
4212 p = XALLOCAVEC (char, size);
4213 memset (p, 1, size);
4214 coeff = c_readstr (p, mode);
4216 target = convert_to_mode (mode, (rtx) data, 1);
4217 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4218 return force_reg (mode, target);
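/* Illustrative sketch (not part of GCC): the coefficient trick used
   above, at the source level. Multiplying the byte value by a mask
   of 0x01 bytes replicates it across the word. */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint8_t c = 0xab;
  uint32_t word = (uint32_t) c * 0x01010101u;
  assert (word == 0xabababab);
  return 0;
}
#endif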
4221 /* Expand expression EXP, which is a call to the memset builtin. Return
4222 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4223 try to get the result in TARGET, if convenient (and in mode MODE if that's
4224 convenient). */
4226 static rtx
4227 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4229 if (!validate_arglist (exp,
4230 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4231 return NULL_RTX;
4233 tree dest = CALL_EXPR_ARG (exp, 0);
4234 tree val = CALL_EXPR_ARG (exp, 1);
4235 tree len = CALL_EXPR_ARG (exp, 2);
4237 check_memop_access (exp, dest, NULL_TREE, len);
4239 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4242 /* Helper function to do the actual work for expand_builtin_memset. The
4243 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4244 so that this can also be called without constructing an actual CALL_EXPR.
4245 The other arguments and return value are the same as for
4246 expand_builtin_memset. */
4248 static rtx
4249 expand_builtin_memset_args (tree dest, tree val, tree len,
4250 rtx target, machine_mode mode, tree orig_exp)
4252 tree fndecl, fn;
4253 enum built_in_function fcode;
4254 machine_mode val_mode;
4255 char c;
4256 unsigned int dest_align;
4257 rtx dest_mem, dest_addr, len_rtx;
4258 HOST_WIDE_INT expected_size = -1;
4259 unsigned int expected_align = 0;
4260 unsigned HOST_WIDE_INT min_size;
4261 unsigned HOST_WIDE_INT max_size;
4262 unsigned HOST_WIDE_INT probable_max_size;
4264 dest_align = get_pointer_alignment (dest);
4266 /* If DEST is not a pointer type, don't do this operation in-line. */
4267 if (dest_align == 0)
4268 return NULL_RTX;
4270 if (currently_expanding_gimple_stmt)
4271 stringop_block_profile (currently_expanding_gimple_stmt,
4272 &expected_align, &expected_size);
4274 if (expected_align < dest_align)
4275 expected_align = dest_align;
4277 /* If the LEN parameter is zero, return DEST. */
4278 if (integer_zerop (len))
4280 /* Evaluate and ignore VAL in case it has side-effects. */
4281 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4282 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4285 /* Stabilize the arguments in case we fail. */
4286 dest = builtin_save_expr (dest);
4287 val = builtin_save_expr (val);
4288 len = builtin_save_expr (len);
4290 len_rtx = expand_normal (len);
4291 determine_block_size (len, len_rtx, &min_size, &max_size,
4292 &probable_max_size);
4293 dest_mem = get_memory_rtx (dest, len);
4294 val_mode = TYPE_MODE (unsigned_char_type_node);
4296 if (TREE_CODE (val) != INTEGER_CST)
4298 rtx val_rtx;
4300 val_rtx = expand_normal (val);
4301 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4303 /* Assume that we can memset by pieces if we can store
4304 the coefficients by pieces (in the required modes).
4305 We can't pass builtin_memset_gen_str as that emits RTL. */
4306 c = 1;
4307 if (tree_fits_uhwi_p (len)
4308 && can_store_by_pieces (tree_to_uhwi (len),
4309 builtin_memset_read_str, &c, dest_align,
4310 true))
4312 val_rtx = force_reg (val_mode, val_rtx);
4313 store_by_pieces (dest_mem, tree_to_uhwi (len),
4314 builtin_memset_gen_str, val_rtx, dest_align,
4315 true, 0);
4317 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4318 dest_align, expected_align,
4319 expected_size, min_size, max_size,
4320 probable_max_size))
4321 goto do_libcall;
4323 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4324 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4325 return dest_mem;
4328 if (target_char_cast (val, &c))
4329 goto do_libcall;
4331 if (c)
4333 if (tree_fits_uhwi_p (len)
4334 && can_store_by_pieces (tree_to_uhwi (len),
4335 builtin_memset_read_str, &c, dest_align,
4336 true))
4337 store_by_pieces (dest_mem, tree_to_uhwi (len),
4338 builtin_memset_read_str, &c, dest_align, true, 0);
4339 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4340 gen_int_mode (c, val_mode),
4341 dest_align, expected_align,
4342 expected_size, min_size, max_size,
4343 probable_max_size))
4344 goto do_libcall;
4346 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4347 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4348 return dest_mem;
4351 set_mem_align (dest_mem, dest_align);
4352 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4353 CALL_EXPR_TAILCALL (orig_exp)
4354 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4355 expected_align, expected_size,
4356 min_size, max_size,
4357 probable_max_size);
4359 if (dest_addr == 0)
4361 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4362 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4365 return dest_addr;
4367 do_libcall:
4368 fndecl = get_callee_fndecl (orig_exp);
4369 fcode = DECL_FUNCTION_CODE (fndecl);
4370 if (fcode == BUILT_IN_MEMSET)
4371 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4372 dest, val, len);
4373 else if (fcode == BUILT_IN_BZERO)
4374 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4375 dest, len);
4376 else
4377 gcc_unreachable ();
4378 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4379 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4380 return expand_call (fn, target, target == const0_rtx);
4383 /* Expand expression EXP, which is a call to the bzero builtin. Return
4384 NULL_RTX if we failed; the caller should emit a normal call. */
4386 static rtx
4387 expand_builtin_bzero (tree exp)
4389 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4390 return NULL_RTX;
4392 tree dest = CALL_EXPR_ARG (exp, 0);
4393 tree size = CALL_EXPR_ARG (exp, 1);
4395 check_memop_access (exp, dest, NULL_TREE, size);
4397 /* New argument list transforming bzero(ptr x, int y) to
4398 memset(ptr x, int 0, size_t y). This is done this way
4399 so that if it isn't expanded inline, we fall back to
4400 calling bzero instead of memset. */
4402 location_t loc = EXPR_LOCATION (exp);
4404 return expand_builtin_memset_args (dest, integer_zero_node,
4405 fold_convert_loc (loc,
4406 size_type_node, size),
4407 const0_rtx, VOIDmode, exp);
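/* Illustrative sketch (not part of GCC): the argument rewrite above,
   at the source level. bzero is declared in <strings.h>. */
#if 0
#include <string.h>
#include <strings.h>

void
zero_demo (char *p, unsigned n)
{
  bzero (p, n);                 /* expanded as if it were ... */
  memset (p, 0, (size_t) n);    /* ... this, falling back to bzero
                                   when not expanded inline. */
}
#endif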
4410 /* Try to expand cmpstr operation ICODE with the given operands.
4411 Return the result rtx on success, otherwise return null. */
4413 static rtx
4414 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4415 HOST_WIDE_INT align)
4417 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4419 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4420 target = NULL_RTX;
4422 struct expand_operand ops[4];
4423 create_output_operand (&ops[0], target, insn_mode);
4424 create_fixed_operand (&ops[1], arg1_rtx);
4425 create_fixed_operand (&ops[2], arg2_rtx);
4426 create_integer_operand (&ops[3], align);
4427 if (maybe_expand_insn (icode, 4, ops))
4428 return ops[0].value;
4429 return NULL_RTX;
4432 /* Expand expression EXP, which is a call to the memcmp built-in function.
4433 Return NULL_RTX if we failed and the caller should emit a normal call,
4434 otherwise try to get the result in TARGET, if convenient.
4435 RESULT_EQ is true if we can relax the returned value to be either zero
4436 or nonzero, without caring about the sign. */
4438 static rtx
4439 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4441 if (!validate_arglist (exp,
4442 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4443 return NULL_RTX;
4445 tree arg1 = CALL_EXPR_ARG (exp, 0);
4446 tree arg2 = CALL_EXPR_ARG (exp, 1);
4447 tree len = CALL_EXPR_ARG (exp, 2);
4448 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4449 bool no_overflow = true;
4451 /* Diagnose calls where the specified length exceeds the size of either
4452 object. */
4453 tree size = compute_objsize (arg1, 0);
4454 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4455 len, /*maxread=*/NULL_TREE, size,
4456 /*objsize=*/NULL_TREE);
4457 if (no_overflow)
4459 size = compute_objsize (arg2, 0);
4460 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4461 len, /*maxread=*/NULL_TREE, size,
4462 /*objsize=*/NULL_TREE);
4465 /* Due to the performance benefit, always inline the calls first
4466 when result_eq is false. */
4467 rtx result = NULL_RTX;
4469 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4471 result = inline_expand_builtin_string_cmp (exp, target, true);
4472 if (result)
4473 return result;
4476 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4477 location_t loc = EXPR_LOCATION (exp);
4479 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4480 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4482 /* If we don't have POINTER_TYPE, call the function. */
4483 if (arg1_align == 0 || arg2_align == 0)
4484 return NULL_RTX;
4486 rtx arg1_rtx = get_memory_rtx (arg1, len);
4487 rtx arg2_rtx = get_memory_rtx (arg2, len);
4488 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4490 /* Set MEM_SIZE as appropriate. */
4491 if (CONST_INT_P (len_rtx))
4493 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4494 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4497 by_pieces_constfn constfn = NULL;
4499 const char *src_str = c_getstr (arg2);
4500 if (result_eq && src_str == NULL)
4502 src_str = c_getstr (arg1);
4503 if (src_str != NULL)
4504 std::swap (arg1_rtx, arg2_rtx);
4507 /* If SRC is a string constant and block move would be done
4508 by pieces, we can avoid loading the string from memory
4509 and only store the computed constants. */
4510 if (src_str
4511 && CONST_INT_P (len_rtx)
4512 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4513 constfn = builtin_memcpy_read_str;
4515 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4516 TREE_TYPE (len), target,
4517 result_eq, constfn,
4518 CONST_CAST (char *, src_str));
4520 if (result)
4522 /* Return the value in the proper mode for this function. */
4523 if (GET_MODE (result) == mode)
4524 return result;
4526 if (target != 0)
4528 convert_move (target, result, 0);
4529 return target;
4532 return convert_to_mode (mode, result, 0);
4535 return NULL_RTX;
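/* Illustrative sketch (not part of GCC): RESULT_EQ is true when only
   the zero/nonzero outcome of the comparison is used, as below, so
   the expansion may skip computing an ordered (signed) result. */
#if 0
#include <string.h>

int
same_header (const void *a, const void *b)
{
  return memcmp (a, b, 16) == 0;   /* the sign of the result is unused */
}
#endif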
4538 /* Expand expression EXP, which is a call to the strcmp builtin. Return
4539 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4540 try to get the result in TARGET, if convenient. */
4542 static rtx
4543 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4545 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4546 return NULL_RTX;
4548 /* Due to the performance benefit, always inline the calls first. */
4549 rtx result = NULL_RTX;
4550 result = inline_expand_builtin_string_cmp (exp, target, false);
4551 if (result)
4552 return result;
4554 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4555 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4556 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4557 return NULL_RTX;
4559 tree arg1 = CALL_EXPR_ARG (exp, 0);
4560 tree arg2 = CALL_EXPR_ARG (exp, 1);
4562 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4563 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4565 /* If we don't have POINTER_TYPE, call the function. */
4566 if (arg1_align == 0 || arg2_align == 0)
4567 return NULL_RTX;
4569 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4570 arg1 = builtin_save_expr (arg1);
4571 arg2 = builtin_save_expr (arg2);
4573 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4574 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4576 /* Try to call cmpstrsi. */
4577 if (cmpstr_icode != CODE_FOR_nothing)
4578 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4579 MIN (arg1_align, arg2_align));
4581 /* Try to determine at least one length and call cmpstrnsi. */
4582 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4584 tree len;
4585 rtx arg3_rtx;
4587 tree len1 = c_strlen (arg1, 1);
4588 tree len2 = c_strlen (arg2, 1);
4590 if (len1)
4591 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4592 if (len2)
4593 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4595 /* If we don't have a constant length for the first, use the length
4596 of the second, if we know it. We don't require a constant for
4597 this case; some cost analysis could be done if both are available
4598 but neither is constant. For now, assume they're equally cheap,
4599 unless one has side effects. If both strings have constant lengths,
4600 use the smaller. */
4602 if (!len1)
4603 len = len2;
4604 else if (!len2)
4605 len = len1;
4606 else if (TREE_SIDE_EFFECTS (len1))
4607 len = len2;
4608 else if (TREE_SIDE_EFFECTS (len2))
4609 len = len1;
4610 else if (TREE_CODE (len1) != INTEGER_CST)
4611 len = len2;
4612 else if (TREE_CODE (len2) != INTEGER_CST)
4613 len = len1;
4614 else if (tree_int_cst_lt (len1, len2))
4615 len = len1;
4616 else
4617 len = len2;
4619 /* If both arguments have side effects, we cannot optimize. */
4620 if (len && !TREE_SIDE_EFFECTS (len))
4622 arg3_rtx = expand_normal (len);
4623 result = expand_cmpstrn_or_cmpmem
4624 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4625 arg3_rtx, MIN (arg1_align, arg2_align));
4629 tree fndecl = get_callee_fndecl (exp);
4630 if (result)
4632 /* Check to see if the argument was declared attribute nonstring
4633 and if so, issue a warning since at this point it's not known
4634 to be nul-terminated. */
4635 maybe_warn_nonstring_arg (fndecl, exp);
4637 /* Return the value in the proper mode for this function. */
4638 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4639 if (GET_MODE (result) == mode)
4640 return result;
4641 if (target == 0)
4642 return convert_to_mode (mode, result, 0);
4643 convert_move (target, result, 0);
4644 return target;
4647 /* Expand the library call ourselves using a stabilized argument
4648 list to avoid re-evaluating the function's arguments twice. */
4649 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4650 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4651 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4652 return expand_call (fn, target, target == const0_rtx);
4655 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4656 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4657 try to get the result in TARGET, if convenient. */
4659 static rtx
4660 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4661 ATTRIBUTE_UNUSED machine_mode mode)
4663 if (!validate_arglist (exp,
4664 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4665 return NULL_RTX;
4667 /* Due to the performance benefit, always inline the calls first. */
4668 rtx result = NULL_RTX;
4669 result = inline_expand_builtin_string_cmp (exp, target, false);
4670 if (result)
4671 return result;
4673 /* If c_strlen can determine an expression for one of the string
4674 lengths, and it doesn't have side effects, then emit cmpstrnsi
4675 using length MIN(strlen(string)+1, arg3). */
4676 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4677 if (cmpstrn_icode == CODE_FOR_nothing)
4678 return NULL_RTX;
4680 tree len;
4682 tree arg1 = CALL_EXPR_ARG (exp, 0);
4683 tree arg2 = CALL_EXPR_ARG (exp, 1);
4684 tree arg3 = CALL_EXPR_ARG (exp, 2);
4686 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4687 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4689 tree len1 = c_strlen (arg1, 1);
4690 tree len2 = c_strlen (arg2, 1);
4692 location_t loc = EXPR_LOCATION (exp);
4694 if (len1)
4695 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4696 if (len2)
4697 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4699 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4701 /* If we don't have a constant length for the first, use the length
4702 of the second, if we know it. If neither string is constant length,
4703 use the given length argument. We don't require a constant for
4704 this case; some cost analysis could be done if both are available
4705 but neither is constant. For now, assume they're equally cheap,
4706 unless one has side effects. If both strings have constant lengths,
4707 use the smaller. */
4709 if (!len1 && !len2)
4710 len = len3;
4711 else if (!len1)
4712 len = len2;
4713 else if (!len2)
4714 len = len1;
4715 else if (TREE_SIDE_EFFECTS (len1))
4716 len = len2;
4717 else if (TREE_SIDE_EFFECTS (len2))
4718 len = len1;
4719 else if (TREE_CODE (len1) != INTEGER_CST)
4720 len = len2;
4721 else if (TREE_CODE (len2) != INTEGER_CST)
4722 len = len1;
4723 else if (tree_int_cst_lt (len1, len2))
4724 len = len1;
4725 else
4726 len = len2;
4728 /* If we are not using the given length, we must incorporate it here.
4729 The actual new length parameter will be MIN(len,arg3) in this case. */
4730 if (len != len3)
4731 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4732 rtx arg1_rtx = get_memory_rtx (arg1, len);
4733 rtx arg2_rtx = get_memory_rtx (arg2, len);
4734 rtx arg3_rtx = expand_normal (len);
4735 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4736 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4737 MIN (arg1_align, arg2_align));
4739 tree fndecl = get_callee_fndecl (exp);
4740 if (result)
4742 /* Check to see if the argument was declared attribute nonstring
4743 and if so, issue a warning since at this point it's not known
4744 to be nul-terminated. */
4745 maybe_warn_nonstring_arg (fndecl, exp);
4747 /* Return the value in the proper mode for this function. */
4748 mode = TYPE_MODE (TREE_TYPE (exp));
4749 if (GET_MODE (result) == mode)
4750 return result;
4751 if (target == 0)
4752 return convert_to_mode (mode, result, 0);
4753 convert_move (target, result, 0);
4754 return target;
4757 /* Expand the library call ourselves using a stabilized argument
4758 list to avoid re-evaluating the function's arguments twice. */
4759 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4760 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4761 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4762 return expand_call (fn, target, target == const0_rtx);
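/* Illustrative sketch (not part of GCC): why MIN (strlen (s) + 1, N)
   is a safe comparison length. strncmp stops at the first NUL, so
   bytes past the terminator of a constant string never matter. */
#if 0
#include <assert.h>
#include <string.h>

int
main (void)
{
  assert (strncmp ("ab", "ab", 100) == 0);    /* stops at the NUL */
  assert (strncmp ("abXY", "ab", 100) > 0);   /* decided at byte 2 vs NUL */
  return 0;
}
#endif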
4765 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4766 if that's convenient. */
4768 rtx
4769 expand_builtin_saveregs (void)
4771 rtx val;
4772 rtx_insn *seq;
4774 /* Don't do __builtin_saveregs more than once in a function.
4775 Save the result of the first call and reuse it. */
4776 if (saveregs_value != 0)
4777 return saveregs_value;
4779 /* When this function is called, it means that registers must be
4780 saved on entry to this function. So we migrate the call to the
4781 first insn of this function. */
4783 start_sequence ();
4785 /* Do whatever the machine needs done in this case. */
4786 val = targetm.calls.expand_builtin_saveregs ();
4788 seq = get_insns ();
4789 end_sequence ();
4791 saveregs_value = val;
4793 /* Put the insns after the NOTE that starts the function. If this
4794 is inside a start_sequence, make the outer-level insn chain current, so
4795 the code is placed at the start of the function. */
4796 push_topmost_sequence ();
4797 emit_insn_after (seq, entry_of_function ());
4798 pop_topmost_sequence ();
4800 return val;
4803 /* Expand a call to __builtin_next_arg. */
4805 static rtx
4806 expand_builtin_next_arg (void)
4808 /* Checking arguments is already done in fold_builtin_next_arg
4809 that must be called before this function. */
4810 return expand_binop (ptr_mode, add_optab,
4811 crtl->args.internal_arg_pointer,
4812 crtl->args.arg_offset_rtx,
4813 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4816 /* Make it easier for the backends by protecting the valist argument
4817 from multiple evaluations. */
4819 static tree
4820 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4822 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4824 /* The current way of determining the type of valist is completely
4825 bogus. We should have the information on the va builtin instead. */
4826 if (!vatype)
4827 vatype = targetm.fn_abi_va_list (cfun->decl);
4829 if (TREE_CODE (vatype) == ARRAY_TYPE)
4831 if (TREE_SIDE_EFFECTS (valist))
4832 valist = save_expr (valist);
4834 /* For this case, the backends will be expecting a pointer to
4835 vatype, but it's possible we've actually been given an array
4836 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4837 So fix it. */
4838 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4840 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4841 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4844 else
4846 tree pt = build_pointer_type (vatype);
4848 if (! needs_lvalue)
4850 if (! TREE_SIDE_EFFECTS (valist))
4851 return valist;
4853 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4854 TREE_SIDE_EFFECTS (valist) = 1;
4857 if (TREE_SIDE_EFFECTS (valist))
4858 valist = save_expr (valist);
4859 valist = fold_build2_loc (loc, MEM_REF,
4860 vatype, valist, build_int_cst (pt, 0));
4863 return valist;
4866 /* The "standard" definition of va_list is void*. */
4868 tree
4869 std_build_builtin_va_list (void)
4871 return ptr_type_node;
4874 /* The "standard" abi va_list is va_list_type_node. */
4876 tree
4877 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4879 return va_list_type_node;
4882 /* The "standard" type of va_list is va_list_type_node. */
4884 tree
4885 std_canonical_va_list_type (tree type)
4887 tree wtype, htype;
4889 wtype = va_list_type_node;
4890 htype = type;
4892 if (TREE_CODE (wtype) == ARRAY_TYPE)
4894 /* If va_list is an array type, the argument may have decayed
4895 to a pointer type, e.g. by being passed to another function.
4896 In that case, unwrap both types so that we can compare the
4897 underlying records. */
4898 if (TREE_CODE (htype) == ARRAY_TYPE
4899 || POINTER_TYPE_P (htype))
4901 wtype = TREE_TYPE (wtype);
4902 htype = TREE_TYPE (htype);
4905 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4906 return va_list_type_node;
4908 return NULL_TREE;
4911 /* The "standard" implementation of va_start: just assign `nextarg' to
4912 the variable. */
4914 void
4915 std_expand_builtin_va_start (tree valist, rtx nextarg)
4917 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4918 convert_move (va_r, nextarg, 0);
4921 /* Expand EXP, a call to __builtin_va_start. */
4923 static rtx
4924 expand_builtin_va_start (tree exp)
4926 rtx nextarg;
4927 tree valist;
4928 location_t loc = EXPR_LOCATION (exp);
4930 if (call_expr_nargs (exp) < 2)
4932 error_at (loc, "too few arguments to function %<va_start%>");
4933 return const0_rtx;
4936 if (fold_builtin_next_arg (exp, true))
4937 return const0_rtx;
4939 nextarg = expand_builtin_next_arg ();
4940 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4942 if (targetm.expand_builtin_va_start)
4943 targetm.expand_builtin_va_start (valist, nextarg);
4944 else
4945 std_expand_builtin_va_start (valist, nextarg);
4947 return const0_rtx;
4950 /* Expand EXP, a call to __builtin_va_end. */
4952 static rtx
4953 expand_builtin_va_end (tree exp)
4955 tree valist = CALL_EXPR_ARG (exp, 0);
4957 /* Evaluate for side effects, if needed. I hate macros that don't
4958 do that. */
4959 if (TREE_SIDE_EFFECTS (valist))
4960 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4962 return const0_rtx;
4965 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4966 builtin rather than just as an assignment in stdarg.h because of the
4967 nastiness of array-type va_list types. */
4969 static rtx
4970 expand_builtin_va_copy (tree exp)
4972 tree dst, src, t;
4973 location_t loc = EXPR_LOCATION (exp);
4975 dst = CALL_EXPR_ARG (exp, 0);
4976 src = CALL_EXPR_ARG (exp, 1);
4978 dst = stabilize_va_list_loc (loc, dst, 1);
4979 src = stabilize_va_list_loc (loc, src, 0);
4981 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4983 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4985 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4986 TREE_SIDE_EFFECTS (t) = 1;
4987 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4989 else
4991 rtx dstb, srcb, size;
4993 /* Evaluate to pointers. */
4994 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4995 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4996 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4997 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4999 dstb = convert_memory_address (Pmode, dstb);
5000 srcb = convert_memory_address (Pmode, srcb);
5002 /* "Dereference" to BLKmode memories. */
5003 dstb = gen_rtx_MEM (BLKmode, dstb);
5004 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5005 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5006 srcb = gen_rtx_MEM (BLKmode, srcb);
5007 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5008 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5010 /* Copy. */
5011 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5014 return const0_rtx;
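/* Illustrative sketch (not part of GCC): why va_copy cannot be a
   plain assignment when the ABI va_list is an array type (as on
   x86_64), which is what the block-copy branch above handles. */
#if 0
#include <stdarg.h>

void
copy_demo (int n, va_list ap)
{
  va_list copy;
  va_copy (copy, ap);   /* 'copy = ap;' would be invalid C when
                           va_list is an array type */
  /* ... traverse COPY without disturbing AP ... */
  va_end (copy);
}
#endif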
5017 /* Expand a call to one of the builtin functions __builtin_frame_address or
5018 __builtin_return_address. */
5020 static rtx
5021 expand_builtin_frame_address (tree fndecl, tree exp)
5023 /* The argument must be a nonnegative integer constant.
5024 It counts the number of frames to scan up the stack.
5025 The value is either the frame pointer value or the return
5026 address saved in that frame. */
5027 if (call_expr_nargs (exp) == 0)
5028 /* Warning about missing arg was already issued. */
5029 return const0_rtx;
5030 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5032 error ("invalid argument to %qD", fndecl);
5033 return const0_rtx;
5035 else
5037 /* Number of frames to scan up the stack. */
5038 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5040 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5042 /* Some ports cannot access arbitrary stack frames. */
5043 if (tem == NULL)
5045 warning (0, "unsupported argument to %qD", fndecl);
5046 return const0_rtx;
5049 if (count)
5051 /* Warn since no effort is made to ensure that any frame
5052 beyond the current one exists or can be safely reached. */
5053 warning (OPT_Wframe_address, "calling %qD with "
5054 "a nonzero argument is unsafe", fndecl);
5057 /* For __builtin_frame_address, return what we've got. */
5058 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5059 return tem;
5061 if (!REG_P (tem)
5062 && ! CONSTANT_P (tem))
5063 tem = copy_addr_to_reg (tem);
5064 return tem;
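/* Illustrative sketch (not part of GCC): typical use of these
   builtins. The argument must be an integer constant, and a nonzero
   value triggers -Wframe-address as warned above. */
#if 0
void *
my_return_address (void)
{
  return __builtin_return_address (0);
}
#endif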
5068 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5069 failed and the caller should emit a normal call. */
5071 static rtx
5072 expand_builtin_alloca (tree exp)
5074 rtx op0;
5075 rtx result;
5076 unsigned int align;
5077 tree fndecl = get_callee_fndecl (exp);
5078 HOST_WIDE_INT max_size;
5079 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5080 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5081 bool valid_arglist
5082 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5083 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5084 VOID_TYPE)
5085 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5086 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5087 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5089 if (!valid_arglist)
5090 return NULL_RTX;
5092 if ((alloca_for_var && !warn_vla_limit)
5093 || (!alloca_for_var && !warn_alloca_limit))
5095 /* -Walloca-larger-than and -Wvla-larger-than settings override
5096 the more general -Walloc-size-larger-than so unless either of
5097 the former options is specified check the alloca arguments for
5098 overflow. */
5099 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5100 int idx[] = { 0, -1 };
5101 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5104 /* Compute the argument. */
5105 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5107 /* Compute the alignment. */
5108 align = (fcode == BUILT_IN_ALLOCA
5109 ? BIGGEST_ALIGNMENT
5110 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5112 /* Compute the maximum size. */
5113 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5114 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5115 : -1);
5117 /* Allocate the desired space. If the allocation stems from the declaration
5118 of a variable-sized object, it cannot accumulate. */
5119 result
5120 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5121 result = convert_memory_address (ptr_mode, result);
5123 return result;
5126 /* Emit a call to __asan_allocas_unpoison call in EXP. Add to second argument
5127 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5128 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5129 handle_builtin_stack_restore function. */
5131 static rtx
5132 expand_asan_emit_allocas_unpoison (tree exp)
5134 tree arg0 = CALL_EXPR_ARG (exp, 0);
5135 tree arg1 = CALL_EXPR_ARG (exp, 1);
5136 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5137 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5138 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5139 stack_pointer_rtx, NULL_RTX, 0,
5140 OPTAB_LIB_WIDEN);
5141 off = convert_modes (ptr_mode, Pmode, off, 0);
5142 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5143 OPTAB_LIB_WIDEN);
5144 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5145 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5146 top, ptr_mode, bot, ptr_mode);
5147 return ret;
5150 /* Expand a call to bswap builtin in EXP.
5151 Return NULL_RTX if a normal call should be emitted rather than expanding the
5152 function in-line. If convenient, the result should be placed in TARGET.
5153 SUBTARGET may be used as the target for computing one of EXP's operands. */
5155 static rtx
5156 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5157 rtx subtarget)
5159 tree arg;
5160 rtx op0;
5162 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5163 return NULL_RTX;
5165 arg = CALL_EXPR_ARG (exp, 0);
5166 op0 = expand_expr (arg,
5167 subtarget && GET_MODE (subtarget) == target_mode
5168 ? subtarget : NULL_RTX,
5169 target_mode, EXPAND_NORMAL);
5170 if (GET_MODE (op0) != target_mode)
5171 op0 = convert_to_mode (target_mode, op0, 1);
5173 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5175 gcc_assert (target);
5177 return convert_to_mode (target_mode, target, 1);
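/* Illustrative sketch (not part of GCC): the byte reversal these
   builtins perform. */
#if 0
#include <assert.h>

int
main (void)
{
  assert (__builtin_bswap32 (0x11223344u) == 0x44332211u);
  return 0;
}
#endif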
5180 /* Expand a call to a unary builtin in EXP.
5181 Return NULL_RTX if a normal call should be emitted rather than expanding the
5182 function in-line. If convenient, the result should be placed in TARGET.
5183 SUBTARGET may be used as the target for computing one of EXP's operands. */
5185 static rtx
5186 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5187 rtx subtarget, optab op_optab)
5189 rtx op0;
5191 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5192 return NULL_RTX;
5194 /* Compute the argument. */
5195 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5196 (subtarget
5197 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5198 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5199 VOIDmode, EXPAND_NORMAL);
5200 /* Compute op, into TARGET if possible.
5201 Set TARGET to wherever the result comes back. */
5202 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5203 op_optab, op0, target, op_optab != clrsb_optab);
5204 gcc_assert (target);
5206 return convert_to_mode (target_mode, target, 0);
5209 /* Expand a call to __builtin_expect. We just return our argument
5210 as the builtin_expect semantics should already have been executed by
5211 the tree branch prediction pass. */
5213 static rtx
5214 expand_builtin_expect (tree exp, rtx target)
5216 tree arg;
5218 if (call_expr_nargs (exp) < 2)
5219 return const0_rtx;
5220 arg = CALL_EXPR_ARG (exp, 0);
5222 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5223 /* When guessing was done, the hints should be already stripped away. */
5224 gcc_assert (!flag_guess_branch_prob
5225 || optimize == 0 || seen_error ());
5226 return target;
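/* Illustrative sketch (not part of GCC): by the time this expander
   runs, the hint below has already shaped branch probabilities, so
   only the value of the first argument is returned. */
#if 0
int
abs_demo (int x)
{
  if (__builtin_expect (x >= 0, 1))   /* "x >= 0 is the likely case" */
    return x;
  return -x;
}
#endif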
5229 /* Expand a call to __builtin_assume_aligned. We just return our first
5230 argument as the builtin_assume_aligned semantic should've been already
5231 executed by CCP. */
5233 static rtx
5234 expand_builtin_assume_aligned (tree exp, rtx target)
5236 if (call_expr_nargs (exp) < 2)
5237 return const0_rtx;
5238 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5239 EXPAND_NORMAL);
5240 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5241 && (call_expr_nargs (exp) < 3
5242 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5243 return target;
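/* Illustrative sketch (not part of GCC): by expansion time CCP has
   already consumed the alignment hint, so only the first argument
   survives here. */
#if 0
double
sum2 (const double *p)
{
  const double *q = (const double *) __builtin_assume_aligned (p, 16);
  return q[0] + q[1];   /* loads may assume 16-byte alignment */
}
#endif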
5246 void
5247 expand_builtin_trap (void)
5249 if (targetm.have_trap ())
5251 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5252 /* For trap insns when not accumulating outgoing args force
5253 REG_ARGS_SIZE note to prevent crossjumping of calls with
5254 different args sizes. */
5255 if (!ACCUMULATE_OUTGOING_ARGS)
5256 add_args_size_note (insn, stack_pointer_delta);
5258 else
5260 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5261 tree call_expr = build_call_expr (fn, 0);
5262 expand_call (call_expr, NULL_RTX, false);
5265 emit_barrier ();
5268 /* Expand a call to __builtin_unreachable. We do nothing except emit
5269 a barrier saying that control flow will not pass here.
5271 It is the responsibility of the program being compiled to ensure
5272 that control flow never reaches __builtin_unreachable. */
5273 static void
5274 expand_builtin_unreachable (void)
5276 emit_barrier ();
5279 /* Expand EXP, a call to fabs, fabsf or fabsl.
5280 Return NULL_RTX if a normal call should be emitted rather than expanding
5281 the function inline. If convenient, the result should be placed
5282 in TARGET. SUBTARGET may be used as the target for computing
5283 the operand. */
5285 static rtx
5286 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5288 machine_mode mode;
5289 tree arg;
5290 rtx op0;
5292 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5293 return NULL_RTX;
5295 arg = CALL_EXPR_ARG (exp, 0);
5296 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5297 mode = TYPE_MODE (TREE_TYPE (arg));
5298 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5299 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5302 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5303 Return NULL if a normal call should be emitted rather than expanding the
5304 function inline. If convenient, the result should be placed in TARGET.
5305 SUBTARGET may be used as the target for computing the operand. */
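/* Source-level semantics, for reference (hypothetical user code):

     double d = __builtin_copysign (3.0, -0.5);   (d == -3.0)

   i.e. the magnitude of the first argument combined with the sign of
   the second; expand_copysign chooses the actual implementation.  */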
5307 static rtx
5308 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5310 rtx op0, op1;
5311 tree arg;
5313 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5314 return NULL_RTX;
5316 arg = CALL_EXPR_ARG (exp, 0);
5317 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5319 arg = CALL_EXPR_ARG (exp, 1);
5320 op1 = expand_normal (arg);
5322 return expand_copysign (op0, op1, target);
5325 /* Expand a call to __builtin___clear_cache. */
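/* Typical use is flushing the instruction cache after writing code at
   run time (hypothetical user code, emit_code being a placeholder):

     emit_code (buf, len);
     __builtin___clear_cache (buf, buf + len);

   On targets without a clear_cache insn this falls back to a libgcc
   call or to nothing at all, as the two branches below explain.  */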
5327 static rtx
5328 expand_builtin___clear_cache (tree exp)
5330 if (!targetm.code_for_clear_cache)
5332 #ifdef CLEAR_INSN_CACHE
5333 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5334 does something. Just do the default expansion to a call to
5335 __clear_cache(). */
5336 return NULL_RTX;
5337 #else
5338 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5339 does nothing. There is no need to call it. Do nothing. */
5340 return const0_rtx;
5341 #endif /* CLEAR_INSN_CACHE */
5344 /* We have a "clear_cache" insn, and it will handle everything. */
5345 tree begin, end;
5346 rtx begin_rtx, end_rtx;
5348 /* We must not expand to a library call. If we did, any
5349 fallback library function in libgcc that might contain a call to
5350 __builtin___clear_cache() would recurse infinitely. */
5351 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5353 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5354 return const0_rtx;
5357 if (targetm.have_clear_cache ())
5359 struct expand_operand ops[2];
5361 begin = CALL_EXPR_ARG (exp, 0);
5362 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5364 end = CALL_EXPR_ARG (exp, 1);
5365 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5367 create_address_operand (&ops[0], begin_rtx);
5368 create_address_operand (&ops[1], end_rtx);
5369 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5370 return const0_rtx;
5372 return const0_rtx;
5375 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
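/* The two binops emitted below compute, in Pmode arithmetic and with
   A = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT (a sketch):

     tramp = (tramp + (A - 1)) & -A;

   the usual round-up-to-alignment-boundary idiom.  */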
5377 static rtx
5378 round_trampoline_addr (rtx tramp)
5380 rtx temp, addend, mask;
5382 /* If we don't need too much alignment, we'll have been guaranteed
5383 proper alignment by get_trampoline_type. */
5384 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5385 return tramp;
5387 /* Round address up to desired boundary. */
5388 temp = gen_reg_rtx (Pmode);
5389 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5390 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5392 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5393 temp, 0, OPTAB_LIB_WIDEN);
5394 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5395 temp, 0, OPTAB_LIB_WIDEN);
5397 return tramp;
5400 static rtx
5401 expand_builtin_init_trampoline (tree exp, bool onstack)
5403 tree t_tramp, t_func, t_chain;
5404 rtx m_tramp, r_tramp, r_chain, tmp;
5406 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5407 POINTER_TYPE, VOID_TYPE))
5408 return NULL_RTX;
5410 t_tramp = CALL_EXPR_ARG (exp, 0);
5411 t_func = CALL_EXPR_ARG (exp, 1);
5412 t_chain = CALL_EXPR_ARG (exp, 2);
5414 r_tramp = expand_normal (t_tramp);
5415 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5416 MEM_NOTRAP_P (m_tramp) = 1;
5418 /* If ONSTACK, the TRAMP argument should be the address of a field
5419 within the local function's FRAME decl. Either way, let's see if
5420 we can fill in the MEM_ATTRs for this memory. */
5421 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5422 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5424 /* Creator of a heap trampoline is responsible for making sure the
5425 address is aligned to at least STACK_BOUNDARY. Normally malloc
5426 will ensure this anyhow. */
5427 tmp = round_trampoline_addr (r_tramp);
5428 if (tmp != r_tramp)
5430 m_tramp = change_address (m_tramp, BLKmode, tmp);
5431 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5432 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5435 /* The FUNC argument should be the address of the nested function.
5436 Extract the actual function decl to pass to the hook. */
5437 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5438 t_func = TREE_OPERAND (t_func, 0);
5439 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5441 r_chain = expand_normal (t_chain);
5443 /* Generate insns to initialize the trampoline. */
5444 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5446 if (onstack)
5448 trampolines_created = 1;
5450 if (targetm.calls.custom_function_descriptors != 0)
5451 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5452 "trampoline generated for nested function %qD", t_func);
5455 return const0_rtx;
5458 static rtx
5459 expand_builtin_adjust_trampoline (tree exp)
5461 rtx tramp;
5463 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5464 return NULL_RTX;
5466 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5467 tramp = round_trampoline_addr (tramp);
5468 if (targetm.calls.trampoline_adjust_address)
5469 tramp = targetm.calls.trampoline_adjust_address (tramp);
5471 return tramp;
5474 /* Expand a call to the builtin descriptor initialization routine.
5475 A descriptor is made up of a couple of pointers to the static
5476 chain and the code entry in this order. */
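/* Schematically, the memory written below looks like this (a sketch in
   C terms, each field of POINTER_SIZE bits):

     struct descriptor { void *static_chain; void *code_entry; };

   with the static chain stored at offset 0 and the code entry at
   offset POINTER_SIZE / BITS_PER_UNIT.  */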
5478 static rtx
5479 expand_builtin_init_descriptor (tree exp)
5481 tree t_descr, t_func, t_chain;
5482 rtx m_descr, r_descr, r_func, r_chain;
5484 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5485 VOID_TYPE))
5486 return NULL_RTX;
5488 t_descr = CALL_EXPR_ARG (exp, 0);
5489 t_func = CALL_EXPR_ARG (exp, 1);
5490 t_chain = CALL_EXPR_ARG (exp, 2);
5492 r_descr = expand_normal (t_descr);
5493 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5494 MEM_NOTRAP_P (m_descr) = 1;
5496 r_func = expand_normal (t_func);
5497 r_chain = expand_normal (t_chain);
5499 /* Generate insns to initialize the descriptor. */
5500 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5501 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5502 POINTER_SIZE / BITS_PER_UNIT), r_func);
5504 return const0_rtx;
5507 /* Expand a call to the builtin descriptor adjustment routine. */
5509 static rtx
5510 expand_builtin_adjust_descriptor (tree exp)
5512 rtx tramp;
5514 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5515 return NULL_RTX;
5517 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5519 /* Unalign the descriptor to allow runtime identification. */
5520 tramp = plus_constant (ptr_mode, tramp,
5521 targetm.calls.custom_function_descriptors);
5523 return force_operand (tramp, NULL_RTX);
5526 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5527 function. The function first checks whether the back end provides
5528 an insn to implement signbit for the respective mode. If not, it
5529 checks whether the floating point format of the value is such that
5530 the sign bit can be extracted. If that is not the case, error out.
5531 EXP is the expression that is a call to the builtin function; if
5532 convenient, the result should be placed in TARGET. */
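/* Two fallback shapes are generated below when no signbit insn exists
   (a sketch, REP being the argument's bit pattern viewed in an integer
   mode):

     REP & ((unsigned) 1 << bitpos)   (sign bit fits in the result mode)
     (REP >> bitpos) & 1              (otherwise: shift, truncate, mask)

   Either form is nonzero iff the sign bit is set, which is all that
   signbit guarantees.  */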
5533 static rtx
5534 expand_builtin_signbit (tree exp, rtx target)
5536 const struct real_format *fmt;
5537 scalar_float_mode fmode;
5538 scalar_int_mode rmode, imode;
5539 tree arg;
5540 int word, bitpos;
5541 enum insn_code icode;
5542 rtx temp;
5543 location_t loc = EXPR_LOCATION (exp);
5545 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5546 return NULL_RTX;
5548 arg = CALL_EXPR_ARG (exp, 0);
5549 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5550 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5551 fmt = REAL_MODE_FORMAT (fmode);
5553 arg = builtin_save_expr (arg);
5555 /* Expand the argument yielding a RTX expression. */
5556 temp = expand_normal (arg);
5558 /* Check if the back end provides an insn that handles signbit for the
5559 argument's mode. */
5560 icode = optab_handler (signbit_optab, fmode);
5561 if (icode != CODE_FOR_nothing)
5563 rtx_insn *last = get_last_insn ();
5564 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5565 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5566 return target;
5567 delete_insns_since (last);
5570 /* For floating point formats without a sign bit, implement signbit
5571 as "ARG < 0.0". */
5572 bitpos = fmt->signbit_ro;
5573 if (bitpos < 0)
5575 /* But we can't do this if the format supports signed zero. */
5576 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5578 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5579 build_real (TREE_TYPE (arg), dconst0));
5580 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5583 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5585 imode = int_mode_for_mode (fmode).require ();
5586 temp = gen_lowpart (imode, temp);
5588 else
5590 imode = word_mode;
5591 /* Handle targets with different FP word orders. */
5592 if (FLOAT_WORDS_BIG_ENDIAN)
5593 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5594 else
5595 word = bitpos / BITS_PER_WORD;
5596 temp = operand_subword_force (temp, word, fmode);
5597 bitpos = bitpos % BITS_PER_WORD;
5600 /* Force the intermediate word_mode (or narrower) result into a
5601 register. This avoids attempting to create paradoxical SUBREGs
5602 of floating point modes below. */
5603 temp = force_reg (imode, temp);
5605 /* If the bitpos is within the "result mode" lowpart, the operation
5606 can be implemented with a single bitwise AND. Otherwise, we need
5607 a right shift and an AND. */
5609 if (bitpos < GET_MODE_BITSIZE (rmode))
5611 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5613 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5614 temp = gen_lowpart (rmode, temp);
5615 temp = expand_binop (rmode, and_optab, temp,
5616 immed_wide_int_const (mask, rmode),
5617 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5619 else
5621 /* Perform a logical right shift to place the signbit in the least
5622 significant bit, then truncate the result to the desired mode
5623 and mask just this bit. */
5624 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5625 temp = gen_lowpart (rmode, temp);
5626 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5627 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5630 return temp;
5633 /* Expand fork or exec calls. TARGET is the desired target of the
5634 call. EXP is the call. FN is the decl of the
5635 actual function being called. IGNORE is nonzero if the
5636 value is to be ignored. */
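/* For example, under -fprofile-arcs a call to fork () is rewritten here
   into a call to __gcov_fork (), giving libgcov a chance to do whatever
   counter bookkeeping profiling requires around the real fork; likewise
   execl becomes __gcov_execl and so on, per the switch below.  */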
5638 static rtx
5639 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5641 tree id, decl;
5642 tree call;
5644 /* If we are not profiling, just call the function. */
5645 if (!profile_arc_flag)
5646 return NULL_RTX;
5648 /* Otherwise call the wrapper. This should be equivalent for the rest of
5649 the compiler, so the code does not diverge, and the wrapper may run
5650 whatever code is necessary to keep the profiling sane. */
5652 switch (DECL_FUNCTION_CODE (fn))
5654 case BUILT_IN_FORK:
5655 id = get_identifier ("__gcov_fork");
5656 break;
5658 case BUILT_IN_EXECL:
5659 id = get_identifier ("__gcov_execl");
5660 break;
5662 case BUILT_IN_EXECV:
5663 id = get_identifier ("__gcov_execv");
5664 break;
5666 case BUILT_IN_EXECLP:
5667 id = get_identifier ("__gcov_execlp");
5668 break;
5670 case BUILT_IN_EXECLE:
5671 id = get_identifier ("__gcov_execle");
5672 break;
5674 case BUILT_IN_EXECVP:
5675 id = get_identifier ("__gcov_execvp");
5676 break;
5678 case BUILT_IN_EXECVE:
5679 id = get_identifier ("__gcov_execve");
5680 break;
5682 default:
5683 gcc_unreachable ();
5686 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5687 FUNCTION_DECL, id, TREE_TYPE (fn));
5688 DECL_EXTERNAL (decl) = 1;
5689 TREE_PUBLIC (decl) = 1;
5690 DECL_ARTIFICIAL (decl) = 1;
5691 TREE_NOTHROW (decl) = 1;
5692 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5693 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5694 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5695 return expand_call (call, target, ignore);
5700 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5701 the pointer in these functions is void*, the tree optimizers may remove
5702 casts. The mode computed in expand_builtin isn't reliable either, due
5703 to __sync_bool_compare_and_swap.
5705 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5706 group of builtins. This gives us log2 of the mode size. */
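/* For instance, for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so this returns the
   integer mode of BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on
   typical targets.  */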
5708 static inline machine_mode
5709 get_builtin_sync_mode (int fcode_diff)
5711 /* The size is not negotiable, so ask not to get BLKmode in return
5712 if the target indicates that a smaller size would be better. */
5713 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5716 /* Expand the memory expression LOC and return the appropriate memory operand
5717 for the builtin_sync operations. */
5719 static rtx
5720 get_builtin_sync_mem (tree loc, machine_mode mode)
5722 rtx addr, mem;
5724 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5725 addr = convert_memory_address (Pmode, addr);
5727 /* Note that we explicitly do not want any alias information for this
5728 memory, so that we kill all other live memories. Otherwise we don't
5729 satisfy the full barrier semantics of the intrinsic. */
5730 mem = validize_mem (gen_rtx_MEM (mode, addr));
5732 /* The alignment needs to be at least that of the mode. */
5733 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5734 get_pointer_alignment (loc)));
5735 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5736 MEM_VOLATILE_P (mem) = 1;
5738 return mem;
5741 /* Make sure an argument is in the right mode.
5742 EXP is the tree argument.
5743 MODE is the mode it should be in. */
5745 static rtx
5746 expand_expr_force_mode (tree exp, machine_mode mode)
5748 rtx val;
5749 machine_mode old_mode;
5751 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5752 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5753 of CONST_INTs, where we know the old_mode only from the call argument. */
5755 old_mode = GET_MODE (val);
5756 if (old_mode == VOIDmode)
5757 old_mode = TYPE_MODE (TREE_TYPE (exp));
5758 val = convert_modes (mode, old_mode, val, 1);
5759 return val;
5763 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5764 EXP is the CALL_EXPR. CODE is the rtx code
5765 that corresponds to the arithmetic or logical operation from the name;
5766 an exception here is that NOT actually means NAND. TARGET is an optional
5767 place for us to store the results; AFTER is true if this is the
5768 fetch_and_xxx form. */
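/* The NAND case is what the warning below is about: since GCC 4.4,

     __sync_fetch_and_nand (p, v)

   atomically performs *p = ~(*p & v), returning the old value, whereas
   earlier releases implemented *p = ~*p & v; hence the one-time
   "changed semantics" note issued to users.  */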
5770 static rtx
5771 expand_builtin_sync_operation (machine_mode mode, tree exp,
5772 enum rtx_code code, bool after,
5773 rtx target)
5775 rtx val, mem;
5776 location_t loc = EXPR_LOCATION (exp);
5778 if (code == NOT && warn_sync_nand)
5780 tree fndecl = get_callee_fndecl (exp);
5781 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5783 static bool warned_f_a_n, warned_n_a_f;
5785 switch (fcode)
5787 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5788 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5789 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5790 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5791 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5792 if (warned_f_a_n)
5793 break;
5795 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5796 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5797 warned_f_a_n = true;
5798 break;
5800 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5801 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5802 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5803 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5804 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5805 if (warned_n_a_f)
5806 break;
5808 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5809 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5810 warned_n_a_f = true;
5811 break;
5813 default:
5814 gcc_unreachable ();
5818 /* Expand the operands. */
5819 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5820 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5822 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5823 after);
5826 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5827 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5828 true if this is the boolean form. TARGET is a place for us to store the
5829 results; this is NOT optional if IS_BOOL is true. */
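/* The two source-level forms handled here (hypothetical user code):

     bool ok  = __sync_bool_compare_and_swap (p, oldv, newv);
     TYPE old = __sync_val_compare_and_swap (p, oldv, newv);

   Both compare *p with oldv and store newv on a match; they differ only
   in returning the success flag versus the prior value, selected via
   the pbool/poval outputs below.  */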
5831 static rtx
5832 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5833 bool is_bool, rtx target)
5835 rtx old_val, new_val, mem;
5836 rtx *pbool, *poval;
5838 /* Expand the operands. */
5839 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5840 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5841 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5843 pbool = poval = NULL;
5844 if (target != const0_rtx)
5846 if (is_bool)
5847 pbool = &target;
5848 else
5849 poval = &target;
5851 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5852 false, MEMMODEL_SYNC_SEQ_CST,
5853 MEMMODEL_SYNC_SEQ_CST))
5854 return NULL_RTX;
5856 return target;
5859 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5860 general form is actually an atomic exchange, and some targets only
5861 support a reduced form with the second argument being a constant 1.
5862 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5863 the results. */
5865 static rtx
5866 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5867 rtx target)
5869 rtx val, mem;
5871 /* Expand the operands. */
5872 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5873 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5875 return expand_sync_lock_test_and_set (target, mem, val);
5878 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5880 static void
5881 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5883 rtx mem;
5885 /* Expand the operands. */
5886 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5888 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5891 /* Given an integer representing an ``enum memmodel'', verify its
5892 correctness and return the memory model enum. */
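/* The argument is normally one of the __ATOMIC_* constants, e.g.
   (hypothetical user code):

     __atomic_load_n (p, __ATOMIC_ACQUIRE);

   Non-constant or out-of-range values are conservatively mapped to
   MEMMODEL_SEQ_CST below, and consume is promoted to acquire.  */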
5894 static enum memmodel
5895 get_memmodel (tree exp)
5897 rtx op;
5898 unsigned HOST_WIDE_INT val;
5899 source_location loc
5900 = expansion_point_location_if_in_system_header (input_location);
5902 /* If the parameter is not a constant, it's a run time value so we'll just
5903 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5904 if (TREE_CODE (exp) != INTEGER_CST)
5905 return MEMMODEL_SEQ_CST;
5907 op = expand_normal (exp);
5909 val = INTVAL (op);
5910 if (targetm.memmodel_check)
5911 val = targetm.memmodel_check (val);
5912 else if (val & ~MEMMODEL_MASK)
5914 warning_at (loc, OPT_Winvalid_memory_model,
5915 "unknown architecture specifier in memory model to builtin");
5916 return MEMMODEL_SEQ_CST;
5919 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5920 if (memmodel_base (val) >= MEMMODEL_LAST)
5922 warning_at (loc, OPT_Winvalid_memory_model,
5923 "invalid memory model argument to builtin");
5924 return MEMMODEL_SEQ_CST;
5927 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5928 be conservative and promote consume to acquire. */
5929 if (val == MEMMODEL_CONSUME)
5930 val = MEMMODEL_ACQUIRE;
5932 return (enum memmodel) val;
5935 /* Expand the __atomic_exchange intrinsic:
5936 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5937 EXP is the CALL_EXPR.
5938 TARGET is an optional place for us to store the results. */
5940 static rtx
5941 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5943 rtx val, mem;
5944 enum memmodel model;
5946 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5948 if (!flag_inline_atomics)
5949 return NULL_RTX;
5951 /* Expand the operands. */
5952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5953 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5955 return expand_atomic_exchange (target, mem, val, model);
5958 /* Expand the __atomic_compare_exchange intrinsic:
5959 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5960 TYPE desired, BOOL weak,
5961 enum memmodel success,
5962 enum memmodel failure)
5963 EXP is the CALL_EXPR.
5964 TARGET is an optional place for us to store the results. */
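/* A sketch of a strong CAS loop built on this (hypothetical user code):

     TYPE expected = *p;
     while (!__atomic_compare_exchange_n (p, &expected, desired, 0,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_SEQ_CST))
       ;

   On failure the builtin writes the observed value back through the
   second argument, which is why the conditional store back at the end
   of this function exists.  */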
5966 static rtx
5967 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5968 rtx target)
5970 rtx expect, desired, mem, oldval;
5971 rtx_code_label *label;
5972 enum memmodel success, failure;
5973 tree weak;
5974 bool is_weak;
5975 source_location loc
5976 = expansion_point_location_if_in_system_header (input_location);
5978 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5979 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5981 if (failure > success)
5983 warning_at (loc, OPT_Winvalid_memory_model,
5984 "failure memory model cannot be stronger than success "
5985 "memory model for %<__atomic_compare_exchange%>");
5986 success = MEMMODEL_SEQ_CST;
5989 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5991 warning_at (loc, OPT_Winvalid_memory_model,
5992 "invalid failure memory model for "
5993 "%<__atomic_compare_exchange%>");
5994 failure = MEMMODEL_SEQ_CST;
5995 success = MEMMODEL_SEQ_CST;
5999 if (!flag_inline_atomics)
6000 return NULL_RTX;
6002 /* Expand the operands. */
6003 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6005 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6006 expect = convert_memory_address (Pmode, expect);
6007 expect = gen_rtx_MEM (mode, expect);
6008 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6010 weak = CALL_EXPR_ARG (exp, 3);
6011 is_weak = false;
6012 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6013 is_weak = true;
6015 if (target == const0_rtx)
6016 target = NULL;
6018 /* Lest the rtl backend create a race condition with an improper store
6019 to memory, always create a new pseudo for OLDVAL. */
6020 oldval = NULL;
6022 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6023 is_weak, success, failure))
6024 return NULL_RTX;
6026 /* Conditionally store back to EXPECT, lest we create a race condition
6027 with an improper store to memory. */
6028 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6029 the normal case where EXPECT is totally private, i.e. a register. At
6030 which point the store can be unconditional. */
6031 label = gen_label_rtx ();
6032 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6033 GET_MODE (target), 1, label);
6034 emit_move_insn (expect, oldval);
6035 emit_label (label);
6037 return target;
6040 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6041 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6042 call. The weak parameter must be dropped to match the expected parameter
6043 list, and the expected argument changed from a value to a pointer to
6044 a memory slot. */
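/* Schematically (a sketch of the rewrite performed here):

     lhs = IFN_ATOMIC_COMPARE_EXCHANGE (p, expected, desired, size|weak,
                                        succ, fail);

   becomes

     tmp = expected;
     ok = __atomic_compare_exchange_N (p, &tmp, desired, succ, fail);
     lhs = {ok, tmp};

   with TMP living in a freshly assigned stack slot and the complex LHS
   carrying the boolean result and the observed value.  */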
6046 static void
6047 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6049 unsigned int z;
6050 vec<tree, va_gc> *vec;
6052 vec_alloc (vec, 5);
6053 vec->quick_push (gimple_call_arg (call, 0));
6054 tree expected = gimple_call_arg (call, 1);
6055 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6056 TREE_TYPE (expected));
6057 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6058 if (expd != x)
6059 emit_move_insn (x, expd);
6060 tree v = make_tree (TREE_TYPE (expected), x);
6061 vec->quick_push (build1 (ADDR_EXPR,
6062 build_pointer_type (TREE_TYPE (expected)), v));
6063 vec->quick_push (gimple_call_arg (call, 2));
6064 /* Skip the boolean weak parameter. */
6065 for (z = 4; z < 6; z++)
6066 vec->quick_push (gimple_call_arg (call, z));
6067 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6068 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6069 gcc_assert (bytes_log2 < 5);
6070 built_in_function fncode
6071 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6072 + bytes_log2);
6073 tree fndecl = builtin_decl_explicit (fncode);
6074 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6075 fndecl);
6076 tree exp = build_call_vec (boolean_type_node, fn, vec);
6077 tree lhs = gimple_call_lhs (call);
6078 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6079 if (lhs)
6081 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6082 if (GET_MODE (boolret) != mode)
6083 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6084 x = force_reg (mode, x);
6085 write_complex_part (target, boolret, true);
6086 write_complex_part (target, x, false);
6090 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6092 void
6093 expand_ifn_atomic_compare_exchange (gcall *call)
6095 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6096 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6097 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6098 rtx expect, desired, mem, oldval, boolret;
6099 enum memmodel success, failure;
6100 tree lhs;
6101 bool is_weak;
6102 source_location loc
6103 = expansion_point_location_if_in_system_header (gimple_location (call));
6105 success = get_memmodel (gimple_call_arg (call, 4));
6106 failure = get_memmodel (gimple_call_arg (call, 5));
6108 if (failure > success)
6110 warning_at (loc, OPT_Winvalid_memory_model,
6111 "failure memory model cannot be stronger than success "
6112 "memory model for %<__atomic_compare_exchange%>");
6113 success = MEMMODEL_SEQ_CST;
6116 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6118 warning_at (loc, OPT_Winvalid_memory_model,
6119 "invalid failure memory model for "
6120 "%<__atomic_compare_exchange%>");
6121 failure = MEMMODEL_SEQ_CST;
6122 success = MEMMODEL_SEQ_CST;
6125 if (!flag_inline_atomics)
6127 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6128 return;
6131 /* Expand the operands. */
6132 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6134 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6135 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6137 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6139 boolret = NULL;
6140 oldval = NULL;
6142 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6143 is_weak, success, failure))
6145 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6146 return;
6149 lhs = gimple_call_lhs (call);
6150 if (lhs)
6152 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6153 if (GET_MODE (boolret) != mode)
6154 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6155 write_complex_part (target, boolret, true);
6156 write_complex_part (target, oldval, false);
6160 /* Expand the __atomic_load intrinsic:
6161 TYPE __atomic_load (TYPE *object, enum memmodel)
6162 EXP is the CALL_EXPR.
6163 TARGET is an optional place for us to store the results. */
6165 static rtx
6166 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6168 rtx mem;
6169 enum memmodel model;
6171 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6172 if (is_mm_release (model) || is_mm_acq_rel (model))
6174 source_location loc
6175 = expansion_point_location_if_in_system_header (input_location);
6176 warning_at (loc, OPT_Winvalid_memory_model,
6177 "invalid memory model for %<__atomic_load%>");
6178 model = MEMMODEL_SEQ_CST;
6181 if (!flag_inline_atomics)
6182 return NULL_RTX;
6184 /* Expand the operand. */
6185 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6187 return expand_atomic_load (target, mem, model);
6191 /* Expand the __atomic_store intrinsic:
6192 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6193 EXP is the CALL_EXPR.
6194 TARGET is an optional place for us to store the results. */
6196 static rtx
6197 expand_builtin_atomic_store (machine_mode mode, tree exp)
6199 rtx mem, val;
6200 enum memmodel model;
6202 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6203 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6204 || is_mm_release (model)))
6206 source_location loc
6207 = expansion_point_location_if_in_system_header (input_location);
6208 warning_at (loc, OPT_Winvalid_memory_model,
6209 "invalid memory model for %<__atomic_store%>");
6210 model = MEMMODEL_SEQ_CST;
6213 if (!flag_inline_atomics)
6214 return NULL_RTX;
6216 /* Expand the operands. */
6217 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6218 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6220 return expand_atomic_store (mem, val, model, false);
6223 /* Expand the __atomic_fetch_XXX intrinsic:
6224 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6225 EXP is the CALL_EXPR.
6226 TARGET is an optional place for us to store the results.
6227 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
6228 FETCH_AFTER is true if returning the result of the operation.
6229 FETCH_AFTER is false if returning the value before the operation.
6230 IGNORE is true if the result is not used.
6231 EXT_CALL is the correct builtin for an external call if this cannot be
6232 resolved to an instruction sequence. */
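/* When an op-and-fetch builtin has to fall back to its fetch-and-op
   library routine, the trailing correction emitted at the end of this
   function is, schematically:

     ret = ret OP val;        (PLUS, MINUS, AND, XOR, IOR)
     ret = ~(ret & val);      (the NAND case, code == NOT)

   turning the returned "old" value into the post-operation value.  */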
6234 static rtx
6235 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6236 enum rtx_code code, bool fetch_after,
6237 bool ignore, enum built_in_function ext_call)
6239 rtx val, mem, ret;
6240 enum memmodel model;
6241 tree fndecl;
6242 tree addr;
6244 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6246 /* Expand the operands. */
6247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6248 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6250 /* Only try generating instructions if inlining is turned on. */
6251 if (flag_inline_atomics)
6253 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6254 if (ret)
6255 return ret;
6258 /* Return if a different routine isn't needed for the library call. */
6259 if (ext_call == BUILT_IN_NONE)
6260 return NULL_RTX;
6262 /* Change the call to the specified function. */
6263 fndecl = get_callee_fndecl (exp);
6264 addr = CALL_EXPR_FN (exp);
6265 STRIP_NOPS (addr);
6267 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6268 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6270 /* If we will emit code after the call, the call cannot be a tail call.
6271 If it is emitted as a tail call, a barrier is emitted after it, and
6272 then all trailing code is removed. */
6273 if (!ignore)
6274 CALL_EXPR_TAILCALL (exp) = 0;
6276 /* Expand the call here so we can emit trailing code. */
6277 ret = expand_call (exp, target, ignore);
6279 /* Replace the original function just in case it matters. */
6280 TREE_OPERAND (addr, 0) = fndecl;
6282 /* Then issue the arithmetic correction to return the right result. */
6283 if (!ignore)
6285 if (code == NOT)
6287 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6288 OPTAB_LIB_WIDEN);
6289 ret = expand_simple_unop (mode, NOT, ret, target, true);
6291 else
6292 ret = expand_simple_binop (mode, code, ret, val, target, true,
6293 OPTAB_LIB_WIDEN);
6295 return ret;
6298 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
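/* These internal functions are matched earlier at the GIMPLE level from
   sequences like (hypothetical user code, here the BIT_TEST_AND_SET
   flavor):

     bool was_set = __atomic_fetch_or (p, 1u << bit, model) & (1u << bit);

   If the direct atomic_bit_test_and_* pattern is unavailable, the tail
   of this function falls back to a plain atomic fetch-op followed by a
   shift/AND to extract the bit.  */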
6300 void
6301 expand_ifn_atomic_bit_test_and (gcall *call)
6303 tree ptr = gimple_call_arg (call, 0);
6304 tree bit = gimple_call_arg (call, 1);
6305 tree flag = gimple_call_arg (call, 2);
6306 tree lhs = gimple_call_lhs (call);
6307 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6308 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6309 enum rtx_code code;
6310 optab optab;
6311 struct expand_operand ops[5];
6313 gcc_assert (flag_inline_atomics);
6315 if (gimple_call_num_args (call) == 4)
6316 model = get_memmodel (gimple_call_arg (call, 3));
6318 rtx mem = get_builtin_sync_mem (ptr, mode);
6319 rtx val = expand_expr_force_mode (bit, mode);
6321 switch (gimple_call_internal_fn (call))
6323 case IFN_ATOMIC_BIT_TEST_AND_SET:
6324 code = IOR;
6325 optab = atomic_bit_test_and_set_optab;
6326 break;
6327 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6328 code = XOR;
6329 optab = atomic_bit_test_and_complement_optab;
6330 break;
6331 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6332 code = AND;
6333 optab = atomic_bit_test_and_reset_optab;
6334 break;
6335 default:
6336 gcc_unreachable ();
6339 if (lhs == NULL_TREE)
6341 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6342 val, NULL_RTX, true, OPTAB_DIRECT);
6343 if (code == AND)
6344 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6345 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6346 return;
6349 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6350 enum insn_code icode = direct_optab_handler (optab, mode);
6351 gcc_assert (icode != CODE_FOR_nothing);
6352 create_output_operand (&ops[0], target, mode);
6353 create_fixed_operand (&ops[1], mem);
6354 create_convert_operand_to (&ops[2], val, mode, true);
6355 create_integer_operand (&ops[3], model);
6356 create_integer_operand (&ops[4], integer_onep (flag));
6357 if (maybe_expand_insn (icode, 5, ops))
6358 return;
6360 rtx bitval = val;
6361 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6362 val, NULL_RTX, true, OPTAB_DIRECT);
6363 rtx maskval = val;
6364 if (code == AND)
6365 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6366 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6367 code, model, false);
6368 if (integer_onep (flag))
6370 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6371 NULL_RTX, true, OPTAB_DIRECT);
6372 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6373 true, OPTAB_DIRECT);
6375 else
6376 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6377 OPTAB_DIRECT);
6378 if (result != target)
6379 emit_move_insn (target, result);
6382 /* Expand an atomic clear operation.
6383 void _atomic_clear (BOOL *obj, enum memmodel)
6384 EXP is the call expression. */
6386 static rtx
6387 expand_builtin_atomic_clear (tree exp)
6389 machine_mode mode;
6390 rtx mem, ret;
6391 enum memmodel model;
6393 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6394 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6395 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6397 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6399 source_location loc
6400 = expansion_point_location_if_in_system_header (input_location);
6401 warning_at (loc, OPT_Winvalid_memory_model,
6402 "invalid memory model for %<__atomic_store%>");
6403 model = MEMMODEL_SEQ_CST;
6406 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6407 Failing that, a store is issued by __atomic_store. The only way this can
6408 fail is if the bool type is larger than a word size. Unlikely, but
6409 handle it anyway for completeness. Assume a single threaded model since
6410 there is no atomic support in this case, and no barriers are required. */
6411 ret = expand_atomic_store (mem, const0_rtx, model, true);
6412 if (!ret)
6413 emit_move_insn (mem, const0_rtx);
6414 return const0_rtx;
6417 /* Expand an atomic test_and_set operation.
6418 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6419 EXP is the call expression. */
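/* The classic spin lock built on this (hypothetical user code):

     while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
       ;   (spin: the builtin returns the previous value)

   and it pairs with __atomic_clear (&flag, __ATOMIC_RELEASE) above.  */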
6421 static rtx
6422 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6424 rtx mem;
6425 enum memmodel model;
6426 machine_mode mode;
6428 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6429 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6430 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6432 return expand_atomic_test_and_set (target, mem, model);
6436 /* Return true if an object of size ARG0, optionally pointed to by ARG1, is
6437 always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
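/* For example, __atomic_always_lock_free (4, 0) folds to true here
   exactly when the target provides a never-failing compare-and-swap
   pattern and an atomic load for the 32-bit integer mode, while an
   under-aligned pointed-to type makes the same query fold to false.  */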
6439 static tree
6440 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6442 int size;
6443 machine_mode mode;
6444 unsigned int mode_align, type_align;
6446 if (TREE_CODE (arg0) != INTEGER_CST)
6447 return NULL_TREE;
6449 /* We need a corresponding integer mode for the access to be lock-free. */
6450 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6451 if (!int_mode_for_size (size, 0).exists (&mode))
6452 return boolean_false_node;
6454 mode_align = GET_MODE_ALIGNMENT (mode);
6456 if (TREE_CODE (arg1) == INTEGER_CST)
6458 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6460 /* Either this argument is null, or it's a fake pointer encoding
6461 the alignment of the object. */
6462 val = least_bit_hwi (val);
6463 val *= BITS_PER_UNIT;
6465 if (val == 0 || mode_align < val)
6466 type_align = mode_align;
6467 else
6468 type_align = val;
6470 else
6472 tree ttype = TREE_TYPE (arg1);
6474 /* This function is usually invoked and folded immediately by the front
6475 end before anything else has a chance to look at it. The pointer
6476 parameter at this point is usually cast to a void *, so check for that
6477 and look past the cast. */
6478 if (CONVERT_EXPR_P (arg1)
6479 && POINTER_TYPE_P (ttype)
6480 && VOID_TYPE_P (TREE_TYPE (ttype))
6481 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6482 arg1 = TREE_OPERAND (arg1, 0);
6484 ttype = TREE_TYPE (arg1);
6485 gcc_assert (POINTER_TYPE_P (ttype));
6487 /* Get the underlying type of the object. */
6488 ttype = TREE_TYPE (ttype);
6489 type_align = TYPE_ALIGN (ttype);
6492 /* If the object has smaller alignment, the lock free routines cannot
6493 be used. */
6494 if (type_align < mode_align)
6495 return boolean_false_node;
6497 /* Check if a compare_and_swap pattern exists for the mode which represents
6498 the required size. The pattern is not allowed to fail, so the existence
6499 of the pattern indicates support is present. Also require that an
6500 atomic load exists for the required size. */
6501 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6502 return boolean_true_node;
6503 else
6504 return boolean_false_node;
6507 /* Return true if the parameters to call EXP represent an object which will
6508 always generate lock free instructions. The first argument represents the
6509 size of the object, and the second parameter is a pointer to the object
6510 itself. If NULL is passed for the object, then the result is based on
6511 typical alignment for an object of the specified size. Otherwise return
6512 false. */
6514 static rtx
6515 expand_builtin_atomic_always_lock_free (tree exp)
6517 tree size;
6518 tree arg0 = CALL_EXPR_ARG (exp, 0);
6519 tree arg1 = CALL_EXPR_ARG (exp, 1);
6521 if (TREE_CODE (arg0) != INTEGER_CST)
6523 error ("non-constant argument 1 to __atomic_always_lock_free");
6524 return const0_rtx;
6527 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6528 if (size == boolean_true_node)
6529 return const1_rtx;
6530 return const0_rtx;
6533 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6534 is lock free on this architecture. */
6536 static tree
6537 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6539 if (!flag_inline_atomics)
6540 return NULL_TREE;
6542 /* If it isn't always lock free, don't generate a result. */
6543 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6544 return boolean_true_node;
6546 return NULL_TREE;
6549 /* Return true if the parameters to call EXP represent an object which will
6550 always generate lock free instructions. The first argument represents the
6551 size of the object, and the second parameter is a pointer to the object
6552 itself. If NULL is passed for the object, then the result is based on
6553 typical alignment for an object of the specified size. Otherwise return
6554 NULL. */
6556 static rtx
6557 expand_builtin_atomic_is_lock_free (tree exp)
6559 tree size;
6560 tree arg0 = CALL_EXPR_ARG (exp, 0);
6561 tree arg1 = CALL_EXPR_ARG (exp, 1);
6563 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6565 error ("non-integer argument 1 to __atomic_is_lock_free");
6566 return NULL_RTX;
6569 if (!flag_inline_atomics)
6570 return NULL_RTX;
6572 /* If the value is known at compile time, return the RTX for it. */
6573 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6574 if (size == boolean_true_node)
6575 return const1_rtx;
6577 return NULL_RTX;
6580 /* Expand the __atomic_thread_fence intrinsic:
6581 void __atomic_thread_fence (enum memmodel)
6582 EXP is the CALL_EXPR. */
6584 static void
6585 expand_builtin_atomic_thread_fence (tree exp)
6587 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6588 expand_mem_thread_fence (model);
6591 /* Expand the __atomic_signal_fence intrinsic:
6592 void __atomic_signal_fence (enum memmodel)
6593 EXP is the CALL_EXPR. */
6595 static void
6596 expand_builtin_atomic_signal_fence (tree exp)
6598 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6599 expand_mem_signal_fence (model);
6602 /* Expand the __sync_synchronize intrinsic. */
6604 static void
6605 expand_builtin_sync_synchronize (void)
6607 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6610 static rtx
6611 expand_builtin_thread_pointer (tree exp, rtx target)
6613 enum insn_code icode;
6614 if (!validate_arglist (exp, VOID_TYPE))
6615 return const0_rtx;
6616 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6617 if (icode != CODE_FOR_nothing)
6619 struct expand_operand op;
6620 /* If the target is not suitable then create a new target. */
6621 if (target == NULL_RTX
6622 || !REG_P (target)
6623 || GET_MODE (target) != Pmode)
6624 target = gen_reg_rtx (Pmode);
6625 create_output_operand (&op, target, Pmode);
6626 expand_insn (icode, 1, &op);
6627 return target;
6629 error ("__builtin_thread_pointer is not supported on this target");
6630 return const0_rtx;
6633 static void
6634 expand_builtin_set_thread_pointer (tree exp)
6636 enum insn_code icode;
6637 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6638 return;
6639 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6640 if (icode != CODE_FOR_nothing)
6642 struct expand_operand op;
6643 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6644 Pmode, EXPAND_NORMAL);
6645 create_input_operand (&op, val, Pmode);
6646 expand_insn (icode, 1, &op);
6647 return;
6649 error ("__builtin_set_thread_pointer is not supported on this target");
6653 /* Emit code to restore the current value of the stack. */
6655 static void
6656 expand_stack_restore (tree var)
6658 rtx_insn *prev;
6659 rtx sa = expand_normal (var);
6661 sa = convert_memory_address (Pmode, sa);
6663 prev = get_last_insn ();
6664 emit_stack_restore (SAVE_BLOCK, sa);
6666 record_new_stack_level ();
6668 fixup_args_size_notes (prev, get_last_insn (), 0);
6671 /* Emit code to save the current value of the stack. */
6673 static rtx
6674 expand_stack_save (void)
6676 rtx ret = NULL_RTX;
6678 emit_stack_save (SAVE_BLOCK, &ret);
6679 return ret;
6682 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6684 static rtx
6685 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6687 const char *name;
6688 rtx fallback_retval;
6689 rtx_insn *(*gen_fn) (rtx, rtx);
6690 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6692 case BUILT_IN_GOACC_PARLEVEL_ID:
6693 name = "__builtin_goacc_parlevel_id";
6694 fallback_retval = const0_rtx;
6695 gen_fn = targetm.gen_oacc_dim_pos;
6696 break;
6697 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6698 name = "__builtin_goacc_parlevel_size";
6699 fallback_retval = const1_rtx;
6700 gen_fn = targetm.gen_oacc_dim_size;
6701 break;
6702 default:
6703 gcc_unreachable ();
6706 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6708 error ("%qs only supported in OpenACC code", name);
6709 return const0_rtx;
6712 tree arg = CALL_EXPR_ARG (exp, 0);
6713 if (TREE_CODE (arg) != INTEGER_CST)
6715 error ("non-constant argument 0 to %qs", name);
6716 return const0_rtx;
6719 int dim = TREE_INT_CST_LOW (arg);
6720 switch (dim)
6722 case GOMP_DIM_GANG:
6723 case GOMP_DIM_WORKER:
6724 case GOMP_DIM_VECTOR:
6725 break;
6726 default:
6727 error ("illegal argument 0 to %qs", name);
6728 return const0_rtx;
6731 if (ignore)
6732 return target;
6734 if (target == NULL_RTX)
6735 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6737 if (!targetm.have_oacc_dim_size ())
6739 emit_move_insn (target, fallback_retval);
6740 return target;
6743 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6744 emit_insn (gen_fn (reg, GEN_INT (dim)));
6745 if (reg != target)
6746 emit_move_insn (target, reg);
6748 return target;
6751 /* Expand a string compare operation using a sequence of char comparisons
6752 to get rid of the calling overhead, with result going to TARGET if
6753 that's convenient.
6755 VAR_STR is the variable string source;
6756 CONST_STR is the constant string source;
6757 LENGTH is the number of chars to compare;
6758 CONST_STR_N indicates which source string is the constant string;
6759 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6761 The inlined expansion looks like (assuming const_str_n is 2, i.e., arg2 is the constant string):
6763 target = var_str[0] - const_str[0];
6764 if (target != 0)
6765 goto ne_label;
6767 target = var_str[length - 2] - const_str[length - 2];
6768 if (target != 0)
6769 goto ne_label;
6770 target = var_str[length - 1] - const_str[length - 1];
6771 ne_label:
6774 static rtx
6775 inline_string_cmp (rtx target, tree var_str, const char* const_str,
6776 unsigned HOST_WIDE_INT length,
6777 int const_str_n, machine_mode mode,
6778 bool is_memcmp)
6780 HOST_WIDE_INT offset = 0;
6781 rtx var_rtx_array
6782 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6783 rtx var_rtx = NULL_RTX;
6784 rtx const_rtx = NULL_RTX;
6785 rtx result = target ? target : gen_reg_rtx (mode);
6786 rtx_code_label *ne_label = gen_label_rtx ();
6787 tree unit_type_node = is_memcmp ? unsigned_char_type_node : char_type_node;
6789 start_sequence ();
6791 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6793 var_rtx
6794 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6795 const_rtx
6796 = builtin_memcpy_read_str (CONST_CAST (char *, const_str),
6797 offset,
6798 as_a <scalar_int_mode>
6799 (TYPE_MODE (unit_type_node)));
6800 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6801 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6803 result = expand_simple_binop (mode, MINUS, op0, op1,
6804 result, is_memcmp ? 1 : 0, OPTAB_WIDEN);
6805 if (i < length - 1)
6806 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6807 mode, true, ne_label);
6808 offset
6809 += GET_MODE_SIZE (as_a <scalar_int_mode> (TYPE_MODE (unit_type_node)));
6812 emit_label (ne_label);
6813 rtx_insn *insns = get_insns ();
6814 end_sequence ();
6815 emit_insn (insns);
6817 return result;
6820 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
6821 going to TARGET if that's convenient.
6822 If the call cannot be inlined, return NULL_RTX. */
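/* For example (hypothetical user code):

     if (strcmp (name, "yes") == 0)
       ...

   qualifies because one argument is a constant string; if the length to
   compare is within the BUILTIN_STRING_CMP_INLINE_LENGTH parameter, the
   call is replaced by the byte-comparison sequence sketched above.  */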
6823 static rtx
6824 inline_expand_builtin_string_cmp (tree exp, rtx target, bool is_memcmp)
6826 tree fndecl = get_callee_fndecl (exp);
6827 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6828 unsigned HOST_WIDE_INT length = 0;
6829 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6831 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6832 || fcode == BUILT_IN_STRNCMP
6833 || fcode == BUILT_IN_MEMCMP);
6835 tree arg1 = CALL_EXPR_ARG (exp, 0);
6836 tree arg2 = CALL_EXPR_ARG (exp, 1);
6837 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6839 unsigned HOST_WIDE_INT len1 = 0;
6840 unsigned HOST_WIDE_INT len2 = 0;
6841 unsigned HOST_WIDE_INT len3 = 0;
6843 const char *src_str1 = c_getstr (arg1, &len1);
6844 const char *src_str2 = c_getstr (arg2, &len2);
6846 /* If neither string is a constant string, the call does not qualify. */
6847 if (!src_str1 && !src_str2)
6848 return NULL_RTX;
6850 /* For strncmp, if the length is not a constant, the call does not qualify. */
6851 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6852 return NULL_RTX;
6854 int const_str_n = 0;
6855 if (!len1)
6856 const_str_n = 2;
6857 else if (!len2)
6858 const_str_n = 1;
6859 else if (len2 > len1)
6860 const_str_n = 1;
6861 else
6862 const_str_n = 2;
6864 gcc_checking_assert (const_str_n > 0);
6865 length = (const_str_n == 1) ? len1 : len2;
6867 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6868 length = len3;
6870 /* If the length of the comparison is larger than the threshold,
6871 do nothing. */
6872 if (length > (unsigned HOST_WIDE_INT)
6873 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6874 return NULL_RTX;
6876 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6878 /* Now start the inline expansion of the call. */
6879 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6880 (const_str_n == 1) ? src_str1 : src_str2, length,
6881 const_str_n, mode, is_memcmp);
6884 /* Expand an expression EXP that calls a built-in function,
6885 with result going to TARGET if that's convenient
6886 (and in mode MODE if that's convenient).
6887 SUBTARGET may be used as the target for computing one of EXP's operands.
6888 IGNORE is nonzero if the value is to be ignored. */
6890 rtx
6891 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6892 int ignore)
6894 tree fndecl = get_callee_fndecl (exp);
6895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6896 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6897 int flags;
6899 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6900 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6902 /* When ASan is enabled, we don't want to expand some memory/string
6903 builtins and rely on libsanitizer's hooks. This allows us to avoid
6904 redundant checks and be sure that a possible overflow will be detected
6905 by ASan. */
6907 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6908 return expand_call (exp, target, ignore);
6910 /* When not optimizing, generate calls to library functions for a certain
6911 set of builtins. */
6912 if (!optimize
6913 && !called_as_built_in (fndecl)
6914 && fcode != BUILT_IN_FORK
6915 && fcode != BUILT_IN_EXECL
6916 && fcode != BUILT_IN_EXECV
6917 && fcode != BUILT_IN_EXECLP
6918 && fcode != BUILT_IN_EXECLE
6919 && fcode != BUILT_IN_EXECVP
6920 && fcode != BUILT_IN_EXECVE
6921 && !ALLOCA_FUNCTION_CODE_P (fcode)
6922 && fcode != BUILT_IN_FREE)
6923 return expand_call (exp, target, ignore);
6925 /* The built-in function expanders test for target == const0_rtx
6926 to determine whether the function's result will be ignored. */
6927 if (ignore)
6928 target = const0_rtx;
6930 /* If the result of a pure or const built-in function is ignored, and
6931 none of its arguments are volatile, we can avoid expanding the
6932 built-in call and just evaluate the arguments for side-effects. */
6933 if (target == const0_rtx
6934 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6935 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6937 bool volatilep = false;
6938 tree arg;
6939 call_expr_arg_iterator iter;
6941 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6942 if (TREE_THIS_VOLATILE (arg))
6944 volatilep = true;
6945 break;
6948 if (! volatilep)
6950 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6951 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6952 return const0_rtx;
6956 switch (fcode)
6958 CASE_FLT_FN (BUILT_IN_FABS):
6959 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6960 case BUILT_IN_FABSD32:
6961 case BUILT_IN_FABSD64:
6962 case BUILT_IN_FABSD128:
6963 target = expand_builtin_fabs (exp, target, subtarget);
6964 if (target)
6965 return target;
6966 break;
6968 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6969 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6970 target = expand_builtin_copysign (exp, target, subtarget);
6971 if (target)
6972 return target;
6973 break;
6975 /* Just do a normal library call if we were unable to fold
6976 the values. */
6977 CASE_FLT_FN (BUILT_IN_CABS):
6978 break;
6980 CASE_FLT_FN (BUILT_IN_FMA):
6981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6982 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6983 if (target)
6984 return target;
6985 break;
6987 CASE_FLT_FN (BUILT_IN_ILOGB):
6988 if (! flag_unsafe_math_optimizations)
6989 break;
6990 gcc_fallthrough ();
6991 CASE_FLT_FN (BUILT_IN_ISINF):
6992 CASE_FLT_FN (BUILT_IN_FINITE):
6993 case BUILT_IN_ISFINITE:
6994 case BUILT_IN_ISNORMAL:
6995 target = expand_builtin_interclass_mathfn (exp, target);
6996 if (target)
6997 return target;
6998 break;
7000 CASE_FLT_FN (BUILT_IN_ICEIL):
7001 CASE_FLT_FN (BUILT_IN_LCEIL):
7002 CASE_FLT_FN (BUILT_IN_LLCEIL):
7003 CASE_FLT_FN (BUILT_IN_LFLOOR):
7004 CASE_FLT_FN (BUILT_IN_IFLOOR):
7005 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7006 target = expand_builtin_int_roundingfn (exp, target);
7007 if (target)
7008 return target;
7009 break;
7011 CASE_FLT_FN (BUILT_IN_IRINT):
7012 CASE_FLT_FN (BUILT_IN_LRINT):
7013 CASE_FLT_FN (BUILT_IN_LLRINT):
7014 CASE_FLT_FN (BUILT_IN_IROUND):
7015 CASE_FLT_FN (BUILT_IN_LROUND):
7016 CASE_FLT_FN (BUILT_IN_LLROUND):
7017 target = expand_builtin_int_roundingfn_2 (exp, target);
7018 if (target)
7019 return target;
7020 break;
7022 CASE_FLT_FN (BUILT_IN_POWI):
7023 target = expand_builtin_powi (exp, target);
7024 if (target)
7025 return target;
7026 break;
7028 CASE_FLT_FN (BUILT_IN_CEXPI):
7029 target = expand_builtin_cexpi (exp, target);
7030 gcc_assert (target);
7031 return target;
7033 CASE_FLT_FN (BUILT_IN_SIN):
7034 CASE_FLT_FN (BUILT_IN_COS):
7035 if (! flag_unsafe_math_optimizations)
7036 break;
7037 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7038 if (target)
7039 return target;
7040 break;
7042 CASE_FLT_FN (BUILT_IN_SINCOS):
7043 if (! flag_unsafe_math_optimizations)
7044 break;
7045 target = expand_builtin_sincos (exp);
7046 if (target)
7047 return target;
7048 break;
7050 case BUILT_IN_APPLY_ARGS:
7051 return expand_builtin_apply_args ();
7053 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7054 FUNCTION with a copy of the parameters described by
7055 ARGUMENTS, and ARGSIZE. It returns a block of memory
7056 allocated on the stack into which is stored all the registers
7057 that might possibly be used for returning the result of a
7058 function. ARGUMENTS is the value returned by
7059 __builtin_apply_args. ARGSIZE is the number of bytes of
7060 arguments that must be copied. ??? How should this value be
7061 computed? We'll also need a safe worst case value for varargs
7062 functions. */
7063 case BUILT_IN_APPLY:
7064 if (!validate_arglist (exp, POINTER_TYPE,
7065 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7066 && !validate_arglist (exp, REFERENCE_TYPE,
7067 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7068 return const0_rtx;
7069 else
7071 rtx ops[3];
7073 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7074 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7075 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7077 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7080 /* __builtin_return (RESULT) causes the function to return the
7081 value described by RESULT. RESULT is address of the block of
7082 memory returned by __builtin_apply. */
7083 case BUILT_IN_RETURN:
7084 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7085 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7086 return const0_rtx;
7088 case BUILT_IN_SAVEREGS:
7089 return expand_builtin_saveregs ();
7091 case BUILT_IN_VA_ARG_PACK:
7092 /* All valid uses of __builtin_va_arg_pack () are removed during
7093 inlining. */
7094 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7095 return const0_rtx;
7097 case BUILT_IN_VA_ARG_PACK_LEN:
7098 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7099 inlining. */
7100 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7101 return const0_rtx;
7103 /* Return the address of the first anonymous stack arg. */
7104 case BUILT_IN_NEXT_ARG:
7105 if (fold_builtin_next_arg (exp, false))
7106 return const0_rtx;
7107 return expand_builtin_next_arg ();
7109 case BUILT_IN_CLEAR_CACHE:
7110 target = expand_builtin___clear_cache (exp);
7111 if (target)
7112 return target;
7113 break;
7115 case BUILT_IN_CLASSIFY_TYPE:
7116 return expand_builtin_classify_type (exp);
7118 case BUILT_IN_CONSTANT_P:
7119 return const0_rtx;
7121 case BUILT_IN_FRAME_ADDRESS:
7122 case BUILT_IN_RETURN_ADDRESS:
7123 return expand_builtin_frame_address (fndecl, exp);
7125 /* Returns the address of the area where the structure is returned.
7126 0 otherwise. */
7127 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7128 if (call_expr_nargs (exp) != 0
7129 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7130 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7131 return const0_rtx;
7132 else
7133 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7135 CASE_BUILT_IN_ALLOCA:
7136 target = expand_builtin_alloca (exp);
7137 if (target)
7138 return target;
7139 break;
7141 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7142 return expand_asan_emit_allocas_unpoison (exp);
7144 case BUILT_IN_STACK_SAVE:
7145 return expand_stack_save ();
7147 case BUILT_IN_STACK_RESTORE:
7148 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7149 return const0_rtx;
7151 case BUILT_IN_BSWAP16:
7152 case BUILT_IN_BSWAP32:
7153 case BUILT_IN_BSWAP64:
7154 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7155 if (target)
7156 return target;
7157 break;
7159 CASE_INT_FN (BUILT_IN_FFS):
7160 target = expand_builtin_unop (target_mode, exp, target,
7161 subtarget, ffs_optab);
7162 if (target)
7163 return target;
7164 break;
7166 CASE_INT_FN (BUILT_IN_CLZ):
7167 target = expand_builtin_unop (target_mode, exp, target,
7168 subtarget, clz_optab);
7169 if (target)
7170 return target;
7171 break;
7173 CASE_INT_FN (BUILT_IN_CTZ):
7174 target = expand_builtin_unop (target_mode, exp, target,
7175 subtarget, ctz_optab);
7176 if (target)
7177 return target;
7178 break;
7180 CASE_INT_FN (BUILT_IN_CLRSB):
7181 target = expand_builtin_unop (target_mode, exp, target,
7182 subtarget, clrsb_optab);
7183 if (target)
7184 return target;
7185 break;
7187 CASE_INT_FN (BUILT_IN_POPCOUNT):
7188 target = expand_builtin_unop (target_mode, exp, target,
7189 subtarget, popcount_optab);
7190 if (target)
7191 return target;
7192 break;
7194 CASE_INT_FN (BUILT_IN_PARITY):
7195 target = expand_builtin_unop (target_mode, exp, target,
7196 subtarget, parity_optab);
7197 if (target)
7198 return target;
7199 break;
7201 case BUILT_IN_STRLEN:
7202 target = expand_builtin_strlen (exp, target, target_mode);
7203 if (target)
7204 return target;
7205 break;
7207 case BUILT_IN_STRNLEN:
7208 target = expand_builtin_strnlen (exp, target, target_mode);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_STRCAT:
7214 target = expand_builtin_strcat (exp, target);
7215 if (target)
7216 return target;
7217 break;
7219 case BUILT_IN_STRCPY:
7220 target = expand_builtin_strcpy (exp, target);
7221 if (target)
7222 return target;
7223 break;
7225 case BUILT_IN_STRNCAT:
7226 target = expand_builtin_strncat (exp, target);
7227 if (target)
7228 return target;
7229 break;
7231 case BUILT_IN_STRNCPY:
7232 target = expand_builtin_strncpy (exp, target);
7233 if (target)
7234 return target;
7235 break;
7237 case BUILT_IN_STPCPY:
7238 target = expand_builtin_stpcpy (exp, target, mode);
7239 if (target)
7240 return target;
7241 break;
7243 case BUILT_IN_STPNCPY:
7244 target = expand_builtin_stpncpy (exp, target);
7245 if (target)
7246 return target;
7247 break;
7249 case BUILT_IN_MEMCHR:
7250 target = expand_builtin_memchr (exp, target);
7251 if (target)
7252 return target;
7253 break;
7255 case BUILT_IN_MEMCPY:
7256 target = expand_builtin_memcpy (exp, target);
7257 if (target)
7258 return target;
7259 break;
7261 case BUILT_IN_MEMMOVE:
7262 target = expand_builtin_memmove (exp, target);
7263 if (target)
7264 return target;
7265 break;
7267 case BUILT_IN_MEMPCPY:
7268 target = expand_builtin_mempcpy (exp, target);
7269 if (target)
7270 return target;
7271 break;
7273 case BUILT_IN_MEMSET:
7274 target = expand_builtin_memset (exp, target, mode);
7275 if (target)
7276 return target;
7277 break;
7279 case BUILT_IN_BZERO:
7280 target = expand_builtin_bzero (exp);
7281 if (target)
7282 return target;
7283 break;
7285 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7286 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7287 when changing it to a strcmp call. */
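/* For instance (a hypothetical GIMPLE-level call; the constant 4 is
   illustrative): __builtin_strcmp_eq (s1, s2, 4) that we fail to
   expand as a memcmp is re-issued below as strcmp (s1, s2).  */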
7288 case BUILT_IN_STRCMP_EQ:
7289 target = expand_builtin_memcmp (exp, target, true);
7290 if (target)
7291 return target;
7293 /* Change this call back to a BUILT_IN_STRCMP. */
7294 TREE_OPERAND (exp, 1)
7295 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7297 /* Delete the last parameter. */
7298 unsigned int i;
7299 vec<tree, va_gc> *arg_vec;
7300 vec_alloc (arg_vec, 2);
7301 for (i = 0; i < 2; i++)
7302 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7303 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7304 /* FALLTHROUGH */
7306 case BUILT_IN_STRCMP:
7307 target = expand_builtin_strcmp (exp, target);
7308 if (target)
7309 return target;
7310 break;
7312 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7313 back to a BUILT_IN_STRNCMP. */
7314 case BUILT_IN_STRNCMP_EQ:
7315 target = expand_builtin_memcmp (exp, target, true);
7316 if (target)
7317 return target;
7319 /* Change it back to a BUILT_IN_STRNCMP. */
7320 TREE_OPERAND (exp, 1)
7321 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7322 /* FALLTHROUGH */
7324 case BUILT_IN_STRNCMP:
7325 target = expand_builtin_strncmp (exp, target, mode);
7326 if (target)
7327 return target;
7328 break;
7330 case BUILT_IN_BCMP:
7331 case BUILT_IN_MEMCMP:
7332 case BUILT_IN_MEMCMP_EQ:
7333 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7334 if (target)
7335 return target;
7336 if (fcode == BUILT_IN_MEMCMP_EQ)
7338 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7339 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7341 break;
7343 case BUILT_IN_SETJMP:
7344 /* This should have been lowered to the builtins below. */
7345 gcc_unreachable ();
7347 case BUILT_IN_SETJMP_SETUP:
7348 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7349 and the receiver label. */
7350 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7352 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7353 VOIDmode, EXPAND_NORMAL);
7354 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7355 rtx_insn *label_r = label_rtx (label);
7357 /* This is copied from the handling of non-local gotos. */
7358 expand_builtin_setjmp_setup (buf_addr, label_r);
7359 nonlocal_goto_handler_labels
7360 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7361 nonlocal_goto_handler_labels);
7362 /* ??? Do not let expand_label treat us as such since we would
7363 not want to be both on the list of non-local labels and on
7364 the list of forced labels. */
7365 FORCED_LABEL (label) = 0;
7366 return const0_rtx;
7368 break;
7370 case BUILT_IN_SETJMP_RECEIVER:
7371 /* __builtin_setjmp_receiver is passed the receiver label. */
7372 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7374 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7375 rtx_insn *label_r = label_rtx (label);
7377 expand_builtin_setjmp_receiver (label_r);
7378 return const0_rtx;
7380 break;
7382 /* __builtin_longjmp is passed a pointer to an array of five words.
7383 It's similar to the C library longjmp function but works with
7384 __builtin_setjmp above. */
7385 case BUILT_IN_LONGJMP:
7386 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7388 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7389 VOIDmode, EXPAND_NORMAL);
7390 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7392 if (value != const1_rtx)
7394 error ("%<__builtin_longjmp%> second argument must be 1");
7395 return const0_rtx;
7398 expand_builtin_longjmp (buf_addr, value);
7399 return const0_rtx;
7401 break;
7403 case BUILT_IN_NONLOCAL_GOTO:
7404 target = expand_builtin_nonlocal_goto (exp);
7405 if (target)
7406 return target;
7407 break;
7409 /* This updates the setjmp buffer that is its argument with the value
7410 of the current stack pointer. */
7411 case BUILT_IN_UPDATE_SETJMP_BUF:
7412 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7414 rtx buf_addr
7415 = expand_normal (CALL_EXPR_ARG (exp, 0));
7417 expand_builtin_update_setjmp_buf (buf_addr);
7418 return const0_rtx;
7420 break;
7422 case BUILT_IN_TRAP:
7423 expand_builtin_trap ();
7424 return const0_rtx;
7426 case BUILT_IN_UNREACHABLE:
7427 expand_builtin_unreachable ();
7428 return const0_rtx;
7430 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7431 case BUILT_IN_SIGNBITD32:
7432 case BUILT_IN_SIGNBITD64:
7433 case BUILT_IN_SIGNBITD128:
7434 target = expand_builtin_signbit (exp, target);
7435 if (target)
7436 return target;
7437 break;
7439 /* Various hooks for the DWARF 2 __throw routine. */
7440 case BUILT_IN_UNWIND_INIT:
7441 expand_builtin_unwind_init ();
7442 return const0_rtx;
7443 case BUILT_IN_DWARF_CFA:
7444 return virtual_cfa_rtx;
7445 #ifdef DWARF2_UNWIND_INFO
7446 case BUILT_IN_DWARF_SP_COLUMN:
7447 return expand_builtin_dwarf_sp_column ();
7448 case BUILT_IN_INIT_DWARF_REG_SIZES:
7449 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7450 return const0_rtx;
7451 #endif
7452 case BUILT_IN_FROB_RETURN_ADDR:
7453 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7454 case BUILT_IN_EXTRACT_RETURN_ADDR:
7455 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7456 case BUILT_IN_EH_RETURN:
7457 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7458 CALL_EXPR_ARG (exp, 1));
7459 return const0_rtx;
7460 case BUILT_IN_EH_RETURN_DATA_REGNO:
7461 return expand_builtin_eh_return_data_regno (exp);
7462 case BUILT_IN_EXTEND_POINTER:
7463 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7464 case BUILT_IN_EH_POINTER:
7465 return expand_builtin_eh_pointer (exp);
7466 case BUILT_IN_EH_FILTER:
7467 return expand_builtin_eh_filter (exp);
7468 case BUILT_IN_EH_COPY_VALUES:
7469 return expand_builtin_eh_copy_values (exp);
7471 case BUILT_IN_VA_START:
7472 return expand_builtin_va_start (exp);
7473 case BUILT_IN_VA_END:
7474 return expand_builtin_va_end (exp);
7475 case BUILT_IN_VA_COPY:
7476 return expand_builtin_va_copy (exp);
7477 case BUILT_IN_EXPECT:
7478 return expand_builtin_expect (exp, target);
7479 case BUILT_IN_ASSUME_ALIGNED:
7480 return expand_builtin_assume_aligned (exp, target);
7481 case BUILT_IN_PREFETCH:
7482 expand_builtin_prefetch (exp);
7483 return const0_rtx;
7485 case BUILT_IN_INIT_TRAMPOLINE:
7486 return expand_builtin_init_trampoline (exp, true);
7487 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7488 return expand_builtin_init_trampoline (exp, false);
7489 case BUILT_IN_ADJUST_TRAMPOLINE:
7490 return expand_builtin_adjust_trampoline (exp);
7492 case BUILT_IN_INIT_DESCRIPTOR:
7493 return expand_builtin_init_descriptor (exp);
7494 case BUILT_IN_ADJUST_DESCRIPTOR:
7495 return expand_builtin_adjust_descriptor (exp);
7497 case BUILT_IN_FORK:
7498 case BUILT_IN_EXECL:
7499 case BUILT_IN_EXECV:
7500 case BUILT_IN_EXECLP:
7501 case BUILT_IN_EXECLE:
7502 case BUILT_IN_EXECVP:
7503 case BUILT_IN_EXECVE:
7504 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7505 if (target)
7506 return target;
7507 break;
7509 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7510 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7511 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7512 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7513 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7515 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7516 if (target)
7517 return target;
7518 break;
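/* A note on the boolean argument above (a sketch of the semantics):
   with false, __sync_fetch_and_add (&v, n) yields the value v held
   before the addition; the __sync_add_and_fetch cases further down
   pass true and yield the updated value instead.  */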
7520 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7521 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7522 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7523 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7524 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7526 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7527 if (target)
7528 return target;
7529 break;
7531 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7532 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7533 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7534 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7535 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7537 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7538 if (target)
7539 return target;
7540 break;
7542 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7543 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7544 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7545 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7546 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7548 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7549 if (target)
7550 return target;
7551 break;
7553 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7554 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7555 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7556 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7557 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7558 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7559 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7560 if (target)
7561 return target;
7562 break;
7564 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7565 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7566 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7567 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7568 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7569 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7570 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7571 if (target)
7572 return target;
7573 break;
7575 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7576 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7577 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7578 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7579 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7580 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7581 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7582 if (target)
7583 return target;
7584 break;
7586 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7587 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7588 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7589 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7590 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7591 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7592 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7593 if (target)
7594 return target;
7595 break;
7597 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7598 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7599 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7600 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7601 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7602 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7603 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7604 if (target)
7605 return target;
7606 break;
7608 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7609 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7610 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7611 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7612 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7613 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7614 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7615 if (target)
7616 return target;
7617 break;
7619 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7620 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7621 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7622 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7623 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7624 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7625 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7626 if (target)
7627 return target;
7628 break;
7630 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7631 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7632 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7633 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7634 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7635 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7636 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7637 if (target)
7638 return target;
7639 break;
7641 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7642 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7643 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7644 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7645 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7646 if (mode == VOIDmode)
7647 mode = TYPE_MODE (boolean_type_node);
7648 if (!target || !register_operand (target, mode))
7649 target = gen_reg_rtx (mode);
7651 mode = get_builtin_sync_mode
7652 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7653 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7654 if (target)
7655 return target;
7656 break;
7658 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7659 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7660 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7661 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7662 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7663 mode = get_builtin_sync_mode
7664 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7665 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7666 if (target)
7667 return target;
7668 break;
7670 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7671 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7672 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7673 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7674 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7675 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7676 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7677 if (target)
7678 return target;
7679 break;
7681 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7682 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7683 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7684 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7685 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7686 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7687 expand_builtin_sync_lock_release (mode, exp);
7688 return const0_rtx;
7690 case BUILT_IN_SYNC_SYNCHRONIZE:
7691 expand_builtin_sync_synchronize ();
7692 return const0_rtx;
7694 case BUILT_IN_ATOMIC_EXCHANGE_1:
7695 case BUILT_IN_ATOMIC_EXCHANGE_2:
7696 case BUILT_IN_ATOMIC_EXCHANGE_4:
7697 case BUILT_IN_ATOMIC_EXCHANGE_8:
7698 case BUILT_IN_ATOMIC_EXCHANGE_16:
7699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7700 target = expand_builtin_atomic_exchange (mode, exp, target);
7701 if (target)
7702 return target;
7703 break;
7705 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7706 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7707 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7708 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7709 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7711 unsigned int nargs, z;
7712 vec<tree, va_gc> *vec;
7714 mode =
7715 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7716 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7717 if (target)
7718 return target;
7720 /* If this is turned into an external library call, the weak parameter
7721 must be dropped to match the expected parameter list. */
7722 nargs = call_expr_nargs (exp);
7723 vec_alloc (vec, nargs - 1);
7724 for (z = 0; z < 3; z++)
7725 vec->quick_push (CALL_EXPR_ARG (exp, z));
7726 /* Skip the boolean weak parameter. */
7727 for (z = 4; z < 6; z++)
7728 vec->quick_push (CALL_EXPR_ARG (exp, z));
7729 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7730 break;
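/* Sketch of the rewrite above: a builtin call of the form
     __atomic_compare_exchange_4 (ptr, expected, desired, weak,
                                  success_order, failure_order)
   becomes the five-argument external library form with the boolean
   WEAK argument (index 3) dropped.  */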
7733 case BUILT_IN_ATOMIC_LOAD_1:
7734 case BUILT_IN_ATOMIC_LOAD_2:
7735 case BUILT_IN_ATOMIC_LOAD_4:
7736 case BUILT_IN_ATOMIC_LOAD_8:
7737 case BUILT_IN_ATOMIC_LOAD_16:
7738 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7739 target = expand_builtin_atomic_load (mode, exp, target);
7740 if (target)
7741 return target;
7742 break;
7744 case BUILT_IN_ATOMIC_STORE_1:
7745 case BUILT_IN_ATOMIC_STORE_2:
7746 case BUILT_IN_ATOMIC_STORE_4:
7747 case BUILT_IN_ATOMIC_STORE_8:
7748 case BUILT_IN_ATOMIC_STORE_16:
7749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7750 target = expand_builtin_atomic_store (mode, exp);
7751 if (target)
7752 return const0_rtx;
7753 break;
7755 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7756 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7757 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7758 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7759 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7761 enum built_in_function lib;
7762 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7763 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7764 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7765 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7766 ignore, lib);
7767 if (target)
7768 return target;
7769 break;
7771 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7772 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7773 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7774 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7775 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7777 enum built_in_function lib;
7778 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7779 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7780 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7781 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7782 ignore, lib);
7783 if (target)
7784 return target;
7785 break;
7787 case BUILT_IN_ATOMIC_AND_FETCH_1:
7788 case BUILT_IN_ATOMIC_AND_FETCH_2:
7789 case BUILT_IN_ATOMIC_AND_FETCH_4:
7790 case BUILT_IN_ATOMIC_AND_FETCH_8:
7791 case BUILT_IN_ATOMIC_AND_FETCH_16:
7793 enum built_in_function lib;
7794 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7795 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7796 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7797 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7798 ignore, lib);
7799 if (target)
7800 return target;
7801 break;
7803 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7804 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7805 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7806 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7807 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7809 enum built_in_function lib;
7810 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7811 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7812 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7813 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7814 ignore, lib);
7815 if (target)
7816 return target;
7817 break;
7819 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7820 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7821 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7822 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7823 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7825 enum built_in_function lib;
7826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7827 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7828 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7829 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7830 ignore, lib);
7831 if (target)
7832 return target;
7833 break;
7835 case BUILT_IN_ATOMIC_OR_FETCH_1:
7836 case BUILT_IN_ATOMIC_OR_FETCH_2:
7837 case BUILT_IN_ATOMIC_OR_FETCH_4:
7838 case BUILT_IN_ATOMIC_OR_FETCH_8:
7839 case BUILT_IN_ATOMIC_OR_FETCH_16:
7841 enum built_in_function lib;
7842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7843 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7844 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7845 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7846 ignore, lib);
7847 if (target)
7848 return target;
7849 break;
7851 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7852 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7853 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7854 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7855 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7856 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7857 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7858 ignore, BUILT_IN_NONE);
7859 if (target)
7860 return target;
7861 break;
7863 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7864 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7865 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7866 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7867 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7868 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7869 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7870 ignore, BUILT_IN_NONE);
7871 if (target)
7872 return target;
7873 break;
7875 case BUILT_IN_ATOMIC_FETCH_AND_1:
7876 case BUILT_IN_ATOMIC_FETCH_AND_2:
7877 case BUILT_IN_ATOMIC_FETCH_AND_4:
7878 case BUILT_IN_ATOMIC_FETCH_AND_8:
7879 case BUILT_IN_ATOMIC_FETCH_AND_16:
7880 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7881 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7882 ignore, BUILT_IN_NONE);
7883 if (target)
7884 return target;
7885 break;
7887 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7888 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7889 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7890 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7891 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7892 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7893 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7894 ignore, BUILT_IN_NONE);
7895 if (target)
7896 return target;
7897 break;
7899 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7900 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7901 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7902 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7903 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7905 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7906 ignore, BUILT_IN_NONE);
7907 if (target)
7908 return target;
7909 break;
7911 case BUILT_IN_ATOMIC_FETCH_OR_1:
7912 case BUILT_IN_ATOMIC_FETCH_OR_2:
7913 case BUILT_IN_ATOMIC_FETCH_OR_4:
7914 case BUILT_IN_ATOMIC_FETCH_OR_8:
7915 case BUILT_IN_ATOMIC_FETCH_OR_16:
7916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7917 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7918 ignore, BUILT_IN_NONE);
7919 if (target)
7920 return target;
7921 break;
7923 case BUILT_IN_ATOMIC_TEST_AND_SET:
7924 return expand_builtin_atomic_test_and_set (exp, target);
7926 case BUILT_IN_ATOMIC_CLEAR:
7927 return expand_builtin_atomic_clear (exp);
7929 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7930 return expand_builtin_atomic_always_lock_free (exp);
7932 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7933 target = expand_builtin_atomic_is_lock_free (exp);
7934 if (target)
7935 return target;
7936 break;
7938 case BUILT_IN_ATOMIC_THREAD_FENCE:
7939 expand_builtin_atomic_thread_fence (exp);
7940 return const0_rtx;
7942 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7943 expand_builtin_atomic_signal_fence (exp);
7944 return const0_rtx;
7946 case BUILT_IN_OBJECT_SIZE:
7947 return expand_builtin_object_size (exp);
7949 case BUILT_IN_MEMCPY_CHK:
7950 case BUILT_IN_MEMPCPY_CHK:
7951 case BUILT_IN_MEMMOVE_CHK:
7952 case BUILT_IN_MEMSET_CHK:
7953 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7954 if (target)
7955 return target;
7956 break;
7958 case BUILT_IN_STRCPY_CHK:
7959 case BUILT_IN_STPCPY_CHK:
7960 case BUILT_IN_STRNCPY_CHK:
7961 case BUILT_IN_STPNCPY_CHK:
7962 case BUILT_IN_STRCAT_CHK:
7963 case BUILT_IN_STRNCAT_CHK:
7964 case BUILT_IN_SNPRINTF_CHK:
7965 case BUILT_IN_VSNPRINTF_CHK:
7966 maybe_emit_chk_warning (exp, fcode);
7967 break;
7969 case BUILT_IN_SPRINTF_CHK:
7970 case BUILT_IN_VSPRINTF_CHK:
7971 maybe_emit_sprintf_chk_warning (exp, fcode);
7972 break;
7974 case BUILT_IN_FREE:
7975 if (warn_free_nonheap_object)
7976 maybe_emit_free_warning (exp);
7977 break;
7979 case BUILT_IN_THREAD_POINTER:
7980 return expand_builtin_thread_pointer (exp, target);
7982 case BUILT_IN_SET_THREAD_POINTER:
7983 expand_builtin_set_thread_pointer (exp);
7984 return const0_rtx;
7986 case BUILT_IN_ACC_ON_DEVICE:
7987 /* Do a library call if we failed to expand the builtin when
7988 folding. */
7989 break;
7991 case BUILT_IN_GOACC_PARLEVEL_ID:
7992 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7993 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7995 default: /* Just do a library call if this is an unknown builtin. */
7996 break;
7999 /* The switch statement above can drop through to cause the function
8000 to be called normally. */
8001 return expand_call (exp, target, ignore);
8004 /* Determine whether a tree node represents a call to a built-in
8005 function. If the tree T is a call to a built-in function with
8006 the right number of arguments of the appropriate types, return
8007 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8008 Otherwise the return value is END_BUILTINS. */
8010 enum built_in_function
8011 builtin_mathfn_code (const_tree t)
8013 const_tree fndecl, arg, parmlist;
8014 const_tree argtype, parmtype;
8015 const_call_expr_arg_iterator iter;
8017 if (TREE_CODE (t) != CALL_EXPR)
8018 return END_BUILTINS;
8020 fndecl = get_callee_fndecl (t);
8021 if (fndecl == NULL_TREE
8022 || TREE_CODE (fndecl) != FUNCTION_DECL
8023 || ! DECL_BUILT_IN (fndecl)
8024 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8025 return END_BUILTINS;
8027 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8028 init_const_call_expr_arg_iterator (t, &iter);
8029 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8031 /* If a function doesn't take a variable number of arguments,
8032 the last element in the list will have type `void'. */
8033 parmtype = TREE_VALUE (parmlist);
8034 if (VOID_TYPE_P (parmtype))
8036 if (more_const_call_expr_args_p (&iter))
8037 return END_BUILTINS;
8038 return DECL_FUNCTION_CODE (fndecl);
8041 if (! more_const_call_expr_args_p (&iter))
8042 return END_BUILTINS;
8044 arg = next_const_call_expr_arg (&iter);
8045 argtype = TREE_TYPE (arg);
8047 if (SCALAR_FLOAT_TYPE_P (parmtype))
8049 if (! SCALAR_FLOAT_TYPE_P (argtype))
8050 return END_BUILTINS;
8052 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8054 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8055 return END_BUILTINS;
8057 else if (POINTER_TYPE_P (parmtype))
8059 if (! POINTER_TYPE_P (argtype))
8060 return END_BUILTINS;
8062 else if (INTEGRAL_TYPE_P (parmtype))
8064 if (! INTEGRAL_TYPE_P (argtype))
8065 return END_BUILTINS;
8067 else
8068 return END_BUILTINS;
8071 /* Variable-length argument list. */
8072 return DECL_FUNCTION_CODE (fndecl);
8075 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8076 evaluate to a constant. */
8078 static tree
8079 fold_builtin_constant_p (tree arg)
8081 /* We return 1 for a numeric type that's known to be a constant
8082 value at compile-time or for an aggregate type that's a
8083 literal constant. */
8084 STRIP_NOPS (arg);
8086 /* If we know this is a constant, return the constant 1. */
8087 if (CONSTANT_CLASS_P (arg)
8088 || (TREE_CODE (arg) == CONSTRUCTOR
8089 && TREE_CONSTANT (arg)))
8090 return integer_one_node;
8091 if (TREE_CODE (arg) == ADDR_EXPR)
8093 tree op = TREE_OPERAND (arg, 0);
8094 if (TREE_CODE (op) == STRING_CST
8095 || (TREE_CODE (op) == ARRAY_REF
8096 && integer_zerop (TREE_OPERAND (op, 1))
8097 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8098 return integer_one_node;
8101 /* If this expression has side effects, show we don't know it to be a
8102 constant. Likewise if it's a pointer or aggregate type since in
8103 those cases we only want literals, as those are only optimized
8104 when generating RTL, not later.
8105 And finally, if we are compiling an initializer, not code, we
8106 need to return a definite result now; there's not going to be any
8107 more optimization done. */
8108 if (TREE_SIDE_EFFECTS (arg)
8109 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8110 || POINTER_TYPE_P (TREE_TYPE (arg))
8111 || cfun == 0
8112 || folding_initializer
8113 || force_folding_builtin_constant_p)
8114 return integer_zero_node;
8116 return NULL_TREE;
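/* Illustrative outcomes of fold_builtin_constant_p above (a sketch):
   __builtin_constant_p (42) and __builtin_constant_p ("abc") fold to 1;
   an argument with side effects, or a pointer/aggregate value that is
   not a recognized literal, folds to 0; an as-yet-unknown scalar is
   left unfolded (NULL_TREE) so later passes can retry.  */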
8119 /* Create builtin_expect with PRED and EXPECTED as its arguments and
8120 return it as a truthvalue. */
8122 static tree
8123 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8124 tree predictor)
8126 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8128 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8129 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8130 ret_type = TREE_TYPE (TREE_TYPE (fn));
8131 pred_type = TREE_VALUE (arg_types);
8132 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8134 pred = fold_convert_loc (loc, pred_type, pred);
8135 expected = fold_convert_loc (loc, expected_type, expected);
8136 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8137 predictor);
8139 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8140 build_int_cst (ret_type, 0));
8143 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
8144 NULL_TREE if no simplification is possible. */
8146 tree
8147 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8149 tree inner, fndecl, inner_arg0;
8150 enum tree_code code;
8152 /* Distribute the expected value over short-circuiting operators.
8153 See through the cast from truthvalue_type_node to long. */
8154 inner_arg0 = arg0;
8155 while (CONVERT_EXPR_P (inner_arg0)
8156 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8157 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8158 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8160 /* If this is a builtin_expect within a builtin_expect keep the
8161 inner one. See through a comparison against a constant. It
8162 might have been added to create a truthvalue. */
8163 inner = inner_arg0;
8165 if (COMPARISON_CLASS_P (inner)
8166 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8167 inner = TREE_OPERAND (inner, 0);
8169 if (TREE_CODE (inner) == CALL_EXPR
8170 && (fndecl = get_callee_fndecl (inner))
8171 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8172 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8173 return arg0;
8175 inner = inner_arg0;
8176 code = TREE_CODE (inner);
8177 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8179 tree op0 = TREE_OPERAND (inner, 0);
8180 tree op1 = TREE_OPERAND (inner, 1);
8181 arg1 = save_expr (arg1);
8183 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8184 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8185 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8187 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
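/* E.g. the distribution just performed (a sketch, ignoring the
   optional predictor argument): __builtin_expect (x && y, 1) becomes
   (__builtin_expect (x, 1) != 0) && (__builtin_expect (y, 1) != 0),
   converted back to the type of the original argument.  */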
8190 /* If the argument isn't invariant then there's nothing else we can do. */
8191 if (!TREE_CONSTANT (inner_arg0))
8192 return NULL_TREE;
8194 /* If we expect that a comparison against the argument will fold to
8195 a constant return the constant. In practice, this means a true
8196 constant or the address of a non-weak symbol. */
8197 inner = inner_arg0;
8198 STRIP_NOPS (inner);
8199 if (TREE_CODE (inner) == ADDR_EXPR)
8203 inner = TREE_OPERAND (inner, 0);
8205 while (TREE_CODE (inner) == COMPONENT_REF
8206 || TREE_CODE (inner) == ARRAY_REF);
8207 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8208 return NULL_TREE;
8211 /* Otherwise, ARG0 already has the proper type for the return value. */
8212 return arg0;
8215 /* Fold a call to __builtin_classify_type with argument ARG. */
8217 static tree
8218 fold_builtin_classify_type (tree arg)
8220 if (arg == 0)
8221 return build_int_cst (integer_type_node, no_type_class);
8223 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8226 /* Fold a call to __builtin_strlen with argument ARG. */
8228 static tree
8229 fold_builtin_strlen (location_t loc, tree type, tree arg)
8231 if (!validate_arg (arg, POINTER_TYPE))
8232 return NULL_TREE;
8233 else
8235 tree len = c_strlen (arg, 0);
8237 if (len)
8238 return fold_convert_loc (loc, type, len);
8240 return NULL_TREE;
8244 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8246 static tree
8247 fold_builtin_inf (location_t loc, tree type, int warn)
8249 REAL_VALUE_TYPE real;
8251 /* __builtin_inff is intended to be usable to define INFINITY on all
8252 targets. If an infinity is not available, INFINITY expands "to a
8253 positive constant of type float that overflows at translation
8254 time", footnote "In this case, using INFINITY will violate the
8255 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8256 Thus we pedwarn to ensure this constraint violation is
8257 diagnosed. */
8258 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8259 pedwarn (loc, 0, "target format does not support infinity");
8261 real_inf (&real);
8262 return build_real (type, real);
8265 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8266 NULL_TREE if no simplification can be made. */
8268 static tree
8269 fold_builtin_sincos (location_t loc,
8270 tree arg0, tree arg1, tree arg2)
8272 tree type;
8273 tree fndecl, call = NULL_TREE;
8275 if (!validate_arg (arg0, REAL_TYPE)
8276 || !validate_arg (arg1, POINTER_TYPE)
8277 || !validate_arg (arg2, POINTER_TYPE))
8278 return NULL_TREE;
8280 type = TREE_TYPE (arg0);
8282 /* Find the matching cexpi builtin; sincos is canonicalized to cexpi. */
8283 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8284 if (fn == END_BUILTINS)
8285 return NULL_TREE;
8287 /* Calculate the result directly when the argument is a constant. */
8288 if (TREE_CODE (arg0) == REAL_CST)
8290 tree complex_type = build_complex_type (type);
8291 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8293 if (!call)
8295 if (!targetm.libc_has_function (function_c99_math_complex)
8296 || !builtin_decl_implicit_p (fn))
8297 return NULL_TREE;
8298 fndecl = builtin_decl_explicit (fn);
8299 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8300 call = builtin_save_expr (call);
8303 tree ptype = build_pointer_type (type);
8304 arg1 = fold_convert (ptype, arg1);
8305 arg2 = fold_convert (ptype, arg2);
8306 return build2 (COMPOUND_EXPR, void_type_node,
8307 build2 (MODIFY_EXPR, void_type_node,
8308 build_fold_indirect_ref_loc (loc, arg1),
8309 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8310 build2 (MODIFY_EXPR, void_type_node,
8311 build_fold_indirect_ref_loc (loc, arg2),
8312 fold_build1_loc (loc, REALPART_EXPR, type, call)));
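/* The COMPOUND_EXPR built above corresponds to this source-level
   sketch, with sinp/cosp standing for ARG1/ARG2 of the sincos call:
     __complex__ double t = cexpi (x);
     *sinp = __imag__ t;
     *cosp = __real__ t;  */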
8315 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8316 Return NULL_TREE if no simplification can be made. */
8318 static tree
8319 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8321 if (!validate_arg (arg1, POINTER_TYPE)
8322 || !validate_arg (arg2, POINTER_TYPE)
8323 || !validate_arg (len, INTEGER_TYPE))
8324 return NULL_TREE;
8326 /* If the LEN parameter is zero, return zero. */
8327 if (integer_zerop (len))
8328 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8329 arg1, arg2);
8331 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8332 if (operand_equal_p (arg1, arg2, 0))
8333 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8335 /* If the LEN parameter is one, return an expression corresponding to
8336 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8337 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8339 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8340 tree cst_uchar_ptr_node
8341 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8343 tree ind1
8344 = fold_convert_loc (loc, integer_type_node,
8345 build1 (INDIRECT_REF, cst_uchar_node,
8346 fold_convert_loc (loc,
8347 cst_uchar_ptr_node,
8348 arg1)));
8349 tree ind2
8350 = fold_convert_loc (loc, integer_type_node,
8351 build1 (INDIRECT_REF, cst_uchar_node,
8352 fold_convert_loc (loc,
8353 cst_uchar_ptr_node,
8354 arg2)));
8355 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8358 return NULL_TREE;
8361 /* Fold a call to builtin isascii with argument ARG. */
8363 static tree
8364 fold_builtin_isascii (location_t loc, tree arg)
8366 if (!validate_arg (arg, INTEGER_TYPE))
8367 return NULL_TREE;
8368 else
8370 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8371 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8372 build_int_cst (integer_type_node,
8373 ~ (unsigned HOST_WIDE_INT) 0x7f));
8374 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8375 arg, integer_zero_node);
8379 /* Fold a call to builtin toascii with argument ARG. */
8381 static tree
8382 fold_builtin_toascii (location_t loc, tree arg)
8384 if (!validate_arg (arg, INTEGER_TYPE))
8385 return NULL_TREE;
8387 /* Transform toascii(c) -> (c & 0x7f). */
8388 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8389 build_int_cst (integer_type_node, 0x7f));
8392 /* Fold a call to builtin isdigit with argument ARG. */
8394 static tree
8395 fold_builtin_isdigit (location_t loc, tree arg)
8397 if (!validate_arg (arg, INTEGER_TYPE))
8398 return NULL_TREE;
8399 else
8401 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8402 /* According to the C standard, isdigit is unaffected by locale.
8403 However, it definitely is affected by the target character set. */
8404 unsigned HOST_WIDE_INT target_digit0
8405 = lang_hooks.to_target_charset ('0');
8407 if (target_digit0 == 0)
8408 return NULL_TREE;
8410 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8411 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8412 build_int_cst (unsigned_type_node, target_digit0));
8413 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8414 build_int_cst (unsigned_type_node, 9));
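/* Worked example for the transform above, assuming an ASCII target
   where '0' == 48: isdigit (c) becomes (unsigned) c - 48 <= 9, one
   unsigned comparison accepting exactly the codes 48..57.  */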
8418 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8420 static tree
8421 fold_builtin_fabs (location_t loc, tree arg, tree type)
8423 if (!validate_arg (arg, REAL_TYPE))
8424 return NULL_TREE;
8426 arg = fold_convert_loc (loc, type, arg);
8427 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8430 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8432 static tree
8433 fold_builtin_abs (location_t loc, tree arg, tree type)
8435 if (!validate_arg (arg, INTEGER_TYPE))
8436 return NULL_TREE;
8438 arg = fold_convert_loc (loc, type, arg);
8439 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8442 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8444 static tree
8445 fold_builtin_carg (location_t loc, tree arg, tree type)
8447 if (validate_arg (arg, COMPLEX_TYPE)
8448 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8450 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8452 if (atan2_fn)
8454 tree new_arg = builtin_save_expr (arg);
8455 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8456 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8457 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8461 return NULL_TREE;
8464 /* Fold a call to builtin frexp; we can assume the base is 2. */
8466 static tree
8467 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8469 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8470 return NULL_TREE;
8472 STRIP_NOPS (arg0);
8474 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8475 return NULL_TREE;
8477 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8479 /* Proceed if a valid pointer type was passed in. */
8480 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8482 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8483 tree frac, exp;
8485 switch (value->cl)
8487 case rvc_zero:
8488 /* For +-0, return (*exp = 0, +-0). */
8489 exp = integer_zero_node;
8490 frac = arg0;
8491 break;
8492 case rvc_nan:
8493 case rvc_inf:
8494 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8495 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8496 case rvc_normal:
8498 /* Since the frexp function always expects base 2, and in
8499 GCC normalized significands are already in the range
8500 [0.5, 1.0), we have exactly what frexp wants. */
8501 REAL_VALUE_TYPE frac_rvt = *value;
8502 SET_REAL_EXP (&frac_rvt, 0);
8503 frac = build_real (rettype, frac_rvt);
8504 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8506 break;
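/* Worked example: for ARG0 == 8.0 the significand is 0.5 and
   REAL_EXP is 4, so we build (*arg1 = 4, 0.5), matching
   frexp (8.0, &e): 8.0 == 0.5 * 2**4.  */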
8507 default:
8508 gcc_unreachable ();
8511 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8512 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8513 TREE_SIDE_EFFECTS (arg1) = 1;
8514 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8517 return NULL_TREE;
8520 /* Fold a call to builtin modf. */
8522 static tree
8523 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8525 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8526 return NULL_TREE;
8528 STRIP_NOPS (arg0);
8530 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8531 return NULL_TREE;
8533 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8535 /* Proceed if a valid pointer type was passed in. */
8536 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8538 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8539 REAL_VALUE_TYPE trunc, frac;
8541 switch (value->cl)
8543 case rvc_nan:
8544 case rvc_zero:
8545 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8546 trunc = frac = *value;
8547 break;
8548 case rvc_inf:
8549 /* For +-Inf, return (*arg1 = arg0, +-0). */
8550 frac = dconst0;
8551 frac.sign = value->sign;
8552 trunc = *value;
8553 break;
8554 case rvc_normal:
8555 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8556 real_trunc (&trunc, VOIDmode, value);
8557 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8558 /* If the original number was negative and already
8559 integral, then the fractional part is -0.0. */
8560 if (value->sign && frac.cl == rvc_zero)
8561 frac.sign = value->sign;
8562 break;
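/* Worked example: for ARG0 == -2.5 we build (*arg1 = -2.0, -0.5);
   for the already-integral -3.0 the fractional part is the -0.0
   produced by the sign fixup above.  */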
8565 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8566 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8567 build_real (rettype, trunc));
8568 TREE_SIDE_EFFECTS (arg1) = 1;
8569 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8570 build_real (rettype, frac));
8573 return NULL_TREE;
8576 /* Given a location LOC, an interclass builtin function decl FNDECL
8577 and its single argument ARG, return a folded expression computing
8578 the same, or NULL_TREE if we either couldn't or didn't want to fold
8579 (the latter happens if there's an RTL instruction available). */
8581 static tree
8582 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8584 machine_mode mode;
8586 if (!validate_arg (arg, REAL_TYPE))
8587 return NULL_TREE;
8589 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8590 return NULL_TREE;
8592 mode = TYPE_MODE (TREE_TYPE (arg));
8594 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8596 /* If there is no optab, try generic code. */
8597 switch (DECL_FUNCTION_CODE (fndecl))
8599 tree result;
8601 CASE_FLT_FN (BUILT_IN_ISINF):
8603 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8604 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8605 tree type = TREE_TYPE (arg);
8606 REAL_VALUE_TYPE r;
8607 char buf[128];
8609 if (is_ibm_extended)
8611 /* NaN and Inf are encoded in the high-order double value
8612 only. The low-order value is not significant. */
8613 type = double_type_node;
8614 mode = DFmode;
8615 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8617 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8618 real_from_string (&r, buf);
8619 result = build_call_expr (isgr_fn, 2,
8620 fold_build1_loc (loc, ABS_EXPR, type, arg),
8621 build_real (type, r));
8622 return result;
8624 CASE_FLT_FN (BUILT_IN_FINITE):
8625 case BUILT_IN_ISFINITE:
8627 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8628 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8629 tree type = TREE_TYPE (arg);
8630 REAL_VALUE_TYPE r;
8631 char buf[128];
8633 if (is_ibm_extended)
8635 /* NaN and Inf are encoded in the high-order double value
8636 only. The low-order value is not significant. */
8637 type = double_type_node;
8638 mode = DFmode;
8639 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8641 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8642 real_from_string (&r, buf);
8643 result = build_call_expr (isle_fn, 2,
8644 fold_build1_loc (loc, ABS_EXPR, type, arg),
8645 build_real (type, r));
8646 /*result = fold_build2_loc (loc, UNGT_EXPR,
8647 TREE_TYPE (TREE_TYPE (fndecl)),
8648 fold_build1_loc (loc, ABS_EXPR, type, arg),
8649 build_real (type, r));
8650 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8651 TREE_TYPE (TREE_TYPE (fndecl)),
8652 result);*/
8653 return result;
8655 case BUILT_IN_ISNORMAL:
8657 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8658 islessequal(fabs(x),DBL_MAX). */
8659 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8660 tree type = TREE_TYPE (arg);
8661 tree orig_arg, max_exp, min_exp;
8662 machine_mode orig_mode = mode;
8663 REAL_VALUE_TYPE rmax, rmin;
8664 char buf[128];
8666 orig_arg = arg = builtin_save_expr (arg);
8667 if (is_ibm_extended)
8669 /* Use double to test the normal range of IBM extended
8670 precision. Emin for IBM extended precision is
8671 different to emin for IEEE double, being 53 higher
8672 since the low double exponent is at least 53 lower
8673 than the high double exponent. */
8674 type = double_type_node;
8675 mode = DFmode;
8676 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8678 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8680 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8681 real_from_string (&rmax, buf);
8682 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8683 real_from_string (&rmin, buf);
8684 max_exp = build_real (type, rmax);
8685 min_exp = build_real (type, rmin);
8687 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8688 if (is_ibm_extended)
8690 /* Testing the high end of the range is done just using
8691 the high double, using the same test as isfinite().
8692 For the subnormal end of the range we first test the
8693 high double, then if its magnitude is equal to the
8694 limit of 0x1p-969, we test whether the low double is
8695 non-zero and opposite sign to the high double. */
8696 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8697 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8698 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8699 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8700 arg, min_exp);
8701 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8702 complex_double_type_node, orig_arg);
8703 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8704 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8705 tree zero = build_real (type, dconst0);
8706 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8707 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8708 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8709 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8710 fold_build3 (COND_EXPR,
8711 integer_type_node,
8712 hilt, logt, lolt));
8713 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8714 eq_min, ok_lo);
8715 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8716 gt_min, eq_min);
8718 else
8720 tree const isge_fn
8721 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8722 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8724 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8725 max_exp, min_exp);
8726 return result;
8728 default:
8729 break;
8732 return NULL_TREE;
8735 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8736 ARG is the argument for the call. */
8738 static tree
8739 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8741 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8743 if (!validate_arg (arg, REAL_TYPE))
8744 return NULL_TREE;
8746 switch (builtin_index)
8748 case BUILT_IN_ISINF:
8749 if (!HONOR_INFINITIES (arg))
8750 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8752 return NULL_TREE;
8754 case BUILT_IN_ISINF_SIGN:
8756 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8757 /* In a boolean context, GCC will fold the inner COND_EXPR to
8758 1. So e.g. "if (isinf_sign(x))" would be folded to just
8759 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8760 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8761 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8762 tree tmp = NULL_TREE;
8764 arg = builtin_save_expr (arg);
8766 if (signbit_fn && isinf_fn)
8768 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8769 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8771 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8772 signbit_call, integer_zero_node);
8773 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8774 isinf_call, integer_zero_node);
8776 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8777 integer_minus_one_node, integer_one_node);
8778 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8779 isinf_call, tmp,
8780 integer_zero_node);
8783 return tmp;
8786 case BUILT_IN_ISFINITE:
8787 if (!HONOR_NANS (arg)
8788 && !HONOR_INFINITIES (arg))
8789 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8791 return NULL_TREE;
8793 case BUILT_IN_ISNAN:
8794 if (!HONOR_NANS (arg))
8795 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8798 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8799 if (is_ibm_extended)
8801 /* NaN and Inf are encoded in the high-order double value
8802 only. The low-order value is not significant. */
8803 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8806 arg = builtin_save_expr (arg);
8807 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8809 default:
8810 gcc_unreachable ();
8814 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8815 This builtin will generate code to return the appropriate floating
8816 point classification depending on the value of the floating point
8817 number passed in. The possible return values must be supplied as
8818 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8819 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8820 one floating point argument which is "type generic". */
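/* Usage sketch (illustrative; the actual macro definition is up to
   the C library): fpclassify (x) can be defined as
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, (x))  */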
8822 static tree
8823 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8825 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8826 arg, type, res, tmp;
8827 machine_mode mode;
8828 REAL_VALUE_TYPE r;
8829 char buf[128];
8831 /* Verify the required arguments in the original call. */
8832 if (nargs != 6
8833 || !validate_arg (args[0], INTEGER_TYPE)
8834 || !validate_arg (args[1], INTEGER_TYPE)
8835 || !validate_arg (args[2], INTEGER_TYPE)
8836 || !validate_arg (args[3], INTEGER_TYPE)
8837 || !validate_arg (args[4], INTEGER_TYPE)
8838 || !validate_arg (args[5], REAL_TYPE))
8839 return NULL_TREE;
8841 fp_nan = args[0];
8842 fp_infinite = args[1];
8843 fp_normal = args[2];
8844 fp_subnormal = args[3];
8845 fp_zero = args[4];
8846 arg = args[5];
8847 type = TREE_TYPE (arg);
8848 mode = TYPE_MODE (type);
8849 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8851 /* fpclassify(x) ->
8852 isnan(x) ? FP_NAN :
8853 (fabs(x) == Inf ? FP_INFINITE :
8854 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8855 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8857 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8858 build_real (type, dconst0));
8859 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8860 tmp, fp_zero, fp_subnormal);
8862 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8863 real_from_string (&r, buf);
8864 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8865 arg, build_real (type, r));
8866 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8868 if (HONOR_INFINITIES (mode))
8870 real_inf (&r);
8871 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8872 build_real (type, r));
8873 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8874 fp_infinite, res);
8877 if (HONOR_NANS (mode))
8879 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8880 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8883 return res;
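/* Illustrative expansion of the fold above (a sketch assuming a typical
   IEEE double target; the FP_* macro values come from the user's <math.h>):

     fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL, FP_ZERO, x)

   becomes, read outermost first, the conditional nest

     !ordered (x, x) ? FP_NAN
       : fabs (x) == Inf ? FP_INFINITE
       : fabs (x) >= 0x1p-1022 ? FP_NORMAL      // DBL_MIN for double
       : fabs (x) == 0 ? FP_ZERO : FP_SUBNORMAL

   with the NaN and Inf arms present only when HONOR_NANS/HONOR_INFINITIES
   hold for the mode.  */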
8886 /* Fold a call to an unordered comparison function such as
8887 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8888 being called and ARG0 and ARG1 are the arguments for the call.
8889 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8890 the opposite of the desired result. UNORDERED_CODE is used
8891 for modes that can hold NaNs and ORDERED_CODE is used for
8892 the rest. */
8894 static tree
8895 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8896 enum tree_code unordered_code,
8897 enum tree_code ordered_code)
8899 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8900 enum tree_code code;
8901 tree type0, type1;
8902 enum tree_code code0, code1;
8903 tree cmp_type = NULL_TREE;
8905 type0 = TREE_TYPE (arg0);
8906 type1 = TREE_TYPE (arg1);
8908 code0 = TREE_CODE (type0);
8909 code1 = TREE_CODE (type1);
8911 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8912 /* Choose the wider of two real types. */
8913 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8914 ? type0 : type1;
8915 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8916 cmp_type = type0;
8917 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8918 cmp_type = type1;
8920 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8921 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8923 if (unordered_code == UNORDERED_EXPR)
8925 if (!HONOR_NANS (arg0))
8926 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8927 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8930 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8931 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8932 fold_build2_loc (loc, code, type, arg0, arg1));
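/* Examples of the inversion above (illustrative, for double x and y):

     __builtin_isgreater (x, y)    ->  !(x unle y)   // TRUTH_NOT of UNLE_EXPR
     __builtin_isunordered (x, y)  ->  x unord y     // UNORDERED_EXPR

   When NaNs need not be honored, the unordered code is replaced by the
   plain ordered one (UNLE becomes LE), and isunordered folds to 0 with
   both operands still evaluated for side effects.  */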
8935 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8936 arithmetic if it can never overflow, or into internal functions that
8937 return both the result of the arithmetic and an overflowed boolean flag
8938 in a complex integer result, or some other check for overflow.
8939 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8940 checking part of that. */
8942 static tree
8943 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8944 tree arg0, tree arg1, tree arg2)
8946 enum internal_fn ifn = IFN_LAST;
8947 /* The code of the expression corresponding to the type-generic
8948 built-in, or ERROR_MARK for the type-specific ones. */
8949 enum tree_code opcode = ERROR_MARK;
8950 bool ovf_only = false;
8952 switch (fcode)
8954 case BUILT_IN_ADD_OVERFLOW_P:
8955 ovf_only = true;
8956 /* FALLTHRU */
8957 case BUILT_IN_ADD_OVERFLOW:
8958 opcode = PLUS_EXPR;
8959 /* FALLTHRU */
8960 case BUILT_IN_SADD_OVERFLOW:
8961 case BUILT_IN_SADDL_OVERFLOW:
8962 case BUILT_IN_SADDLL_OVERFLOW:
8963 case BUILT_IN_UADD_OVERFLOW:
8964 case BUILT_IN_UADDL_OVERFLOW:
8965 case BUILT_IN_UADDLL_OVERFLOW:
8966 ifn = IFN_ADD_OVERFLOW;
8967 break;
8968 case BUILT_IN_SUB_OVERFLOW_P:
8969 ovf_only = true;
8970 /* FALLTHRU */
8971 case BUILT_IN_SUB_OVERFLOW:
8972 opcode = MINUS_EXPR;
8973 /* FALLTHRU */
8974 case BUILT_IN_SSUB_OVERFLOW:
8975 case BUILT_IN_SSUBL_OVERFLOW:
8976 case BUILT_IN_SSUBLL_OVERFLOW:
8977 case BUILT_IN_USUB_OVERFLOW:
8978 case BUILT_IN_USUBL_OVERFLOW:
8979 case BUILT_IN_USUBLL_OVERFLOW:
8980 ifn = IFN_SUB_OVERFLOW;
8981 break;
8982 case BUILT_IN_MUL_OVERFLOW_P:
8983 ovf_only = true;
8984 /* FALLTHRU */
8985 case BUILT_IN_MUL_OVERFLOW:
8986 opcode = MULT_EXPR;
8987 /* FALLTHRU */
8988 case BUILT_IN_SMUL_OVERFLOW:
8989 case BUILT_IN_SMULL_OVERFLOW:
8990 case BUILT_IN_SMULLL_OVERFLOW:
8991 case BUILT_IN_UMUL_OVERFLOW:
8992 case BUILT_IN_UMULL_OVERFLOW:
8993 case BUILT_IN_UMULLL_OVERFLOW:
8994 ifn = IFN_MUL_OVERFLOW;
8995 break;
8996 default:
8997 gcc_unreachable ();
9000 /* For the "generic" overloads, the first two arguments can have different
9001 types and the last argument determines the target type to use to check
9002 for overflow. The arguments of the other overloads all have the same
9003 type. */
9004 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9006 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9007 arguments are constant, attempt to fold the built-in call into a constant
9008 expression indicating whether or not it detected an overflow. */
9009 if (ovf_only
9010 && TREE_CODE (arg0) == INTEGER_CST
9011 && TREE_CODE (arg1) == INTEGER_CST)
9012 /* Perform the computation in the target type and check for overflow. */
9013 return omit_one_operand_loc (loc, boolean_type_node,
9014 arith_overflowed_p (opcode, type, arg0, arg1)
9015 ? boolean_true_node : boolean_false_node,
9016 arg2);
9018 tree ctype = build_complex_type (type);
9019 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9020 2, arg0, arg1);
9021 tree tgt = save_expr (call);
9022 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9023 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9024 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9026 if (ovf_only)
9027 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9029 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9030 tree store
9031 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9032 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
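/* Illustrative sketch of the folding above (variable names are assumed):

     int r;
     bool ovf = __builtin_add_overflow (a, b, &r);

   becomes, in tree form, roughly

     t = .ADD_OVERFLOW (a, b);   // internal fn, complex integer result
     r = REALPART_EXPR <t>;
     ovf = (bool) IMAGPART_EXPR <t>;

   while __builtin_add_overflow_p (INT_MAX, 1, (int) 0), having two
   INTEGER_CST operands, folds directly to the constant true.  */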
9035 /* Fold a call to __builtin_FILE to a constant string. */
9037 static inline tree
9038 fold_builtin_FILE (location_t loc)
9040 if (const char *fname = LOCATION_FILE (loc))
9042 /* The documentation says this builtin is equivalent to the preprocessor
9043 __FILE__ macro so it appears appropriate to use the same file prefix
9044 mappings. */
9045 fname = remap_macro_filename (fname);
9046 return build_string_literal (strlen (fname) + 1, fname);
9049 return build_string_literal (1, "");
9052 /* Fold a call to __builtin_FUNCTION to a constant string. */
9054 static inline tree
9055 fold_builtin_FUNCTION ()
9057 const char *name = "";
9059 if (current_function_decl)
9060 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9062 return build_string_literal (strlen (name) + 1, name);
9065 /* Fold a call to __builtin_LINE to an integer constant. */
9067 static inline tree
9068 fold_builtin_LINE (location_t loc, tree type)
9070 return build_int_cst (type, LOCATION_LINE (loc));
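/* Illustrative example of the three folds above (file name "t.c" is an
   assumption for the sketch):

     void f (void)
     {
       const char *file = __builtin_FILE ();      // folds to "t.c"
       const char *func = __builtin_FUNCTION ();  // folds to "f"
       int line = __builtin_LINE ();              // folds to the line number
     }

   All three fold to constants, so they can seed default arguments
   without any runtime cost.  */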
9073 /* Fold a call to built-in function FNDECL with 0 arguments.
9074 This function returns NULL_TREE if no simplification was possible. */
9076 static tree
9077 fold_builtin_0 (location_t loc, tree fndecl)
9079 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9080 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9081 switch (fcode)
9083 case BUILT_IN_FILE:
9084 return fold_builtin_FILE (loc);
9086 case BUILT_IN_FUNCTION:
9087 return fold_builtin_FUNCTION ();
9089 case BUILT_IN_LINE:
9090 return fold_builtin_LINE (loc, type);
9092 CASE_FLT_FN (BUILT_IN_INF):
9093 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9094 case BUILT_IN_INFD32:
9095 case BUILT_IN_INFD64:
9096 case BUILT_IN_INFD128:
9097 return fold_builtin_inf (loc, type, true);
9099 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9100 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9101 return fold_builtin_inf (loc, type, false);
9103 case BUILT_IN_CLASSIFY_TYPE:
9104 return fold_builtin_classify_type (NULL_TREE);
9106 default:
9107 break;
9109 return NULL_TREE;
9112 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9113 This function returns NULL_TREE if no simplification was possible. */
9115 static tree
9116 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9118 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9119 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9121 if (TREE_CODE (arg0) == ERROR_MARK)
9122 return NULL_TREE;
9124 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9125 return ret;
9127 switch (fcode)
9129 case BUILT_IN_CONSTANT_P:
9131 tree val = fold_builtin_constant_p (arg0);
9133 /* Gimplification will pull the CALL_EXPR for the builtin out of
9134 an if condition. When not optimizing, we'll not CSE it back.
9135 To avoid regressions such as link errors, return false now. */
9136 if (!val && !optimize)
9137 val = integer_zero_node;
9139 return val;
9142 case BUILT_IN_CLASSIFY_TYPE:
9143 return fold_builtin_classify_type (arg0);
9145 case BUILT_IN_STRLEN:
9146 return fold_builtin_strlen (loc, type, arg0);
9148 CASE_FLT_FN (BUILT_IN_FABS):
9149 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9150 case BUILT_IN_FABSD32:
9151 case BUILT_IN_FABSD64:
9152 case BUILT_IN_FABSD128:
9153 return fold_builtin_fabs (loc, arg0, type);
9155 case BUILT_IN_ABS:
9156 case BUILT_IN_LABS:
9157 case BUILT_IN_LLABS:
9158 case BUILT_IN_IMAXABS:
9159 return fold_builtin_abs (loc, arg0, type);
9161 CASE_FLT_FN (BUILT_IN_CONJ):
9162 if (validate_arg (arg0, COMPLEX_TYPE)
9163 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9164 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9165 break;
9167 CASE_FLT_FN (BUILT_IN_CREAL):
9168 if (validate_arg (arg0, COMPLEX_TYPE)
9169 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9170 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9171 break;
9173 CASE_FLT_FN (BUILT_IN_CIMAG):
9174 if (validate_arg (arg0, COMPLEX_TYPE)
9175 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9176 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9177 break;
9179 CASE_FLT_FN (BUILT_IN_CARG):
9180 return fold_builtin_carg (loc, arg0, type);
9182 case BUILT_IN_ISASCII:
9183 return fold_builtin_isascii (loc, arg0);
9185 case BUILT_IN_TOASCII:
9186 return fold_builtin_toascii (loc, arg0);
9188 case BUILT_IN_ISDIGIT:
9189 return fold_builtin_isdigit (loc, arg0);
9191 CASE_FLT_FN (BUILT_IN_FINITE):
9192 case BUILT_IN_FINITED32:
9193 case BUILT_IN_FINITED64:
9194 case BUILT_IN_FINITED128:
9195 case BUILT_IN_ISFINITE:
9197 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9198 if (ret)
9199 return ret;
9200 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9203 CASE_FLT_FN (BUILT_IN_ISINF):
9204 case BUILT_IN_ISINFD32:
9205 case BUILT_IN_ISINFD64:
9206 case BUILT_IN_ISINFD128:
9208 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9209 if (ret)
9210 return ret;
9211 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9214 case BUILT_IN_ISNORMAL:
9215 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9217 case BUILT_IN_ISINF_SIGN:
9218 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9220 CASE_FLT_FN (BUILT_IN_ISNAN):
9221 case BUILT_IN_ISNAND32:
9222 case BUILT_IN_ISNAND64:
9223 case BUILT_IN_ISNAND128:
9224 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9226 case BUILT_IN_FREE:
9227 if (integer_zerop (arg0))
9228 return build_empty_stmt (loc);
9229 break;
9231 default:
9232 break;
9235 return NULL_TREE;
9239 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9240 This function returns NULL_TREE if no simplification was possible. */
9242 static tree
9243 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9245 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9246 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9248 if (TREE_CODE (arg0) == ERROR_MARK
9249 || TREE_CODE (arg1) == ERROR_MARK)
9250 return NULL_TREE;
9252 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9253 return ret;
9255 switch (fcode)
9257 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9258 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9259 if (validate_arg (arg0, REAL_TYPE)
9260 && validate_arg (arg1, POINTER_TYPE))
9261 return do_mpfr_lgamma_r (arg0, arg1, type);
9262 break;
9264 CASE_FLT_FN (BUILT_IN_FREXP):
9265 return fold_builtin_frexp (loc, arg0, arg1, type);
9267 CASE_FLT_FN (BUILT_IN_MODF):
9268 return fold_builtin_modf (loc, arg0, arg1, type);
9270 case BUILT_IN_STRSPN:
9271 return fold_builtin_strspn (loc, arg0, arg1);
9273 case BUILT_IN_STRCSPN:
9274 return fold_builtin_strcspn (loc, arg0, arg1);
9276 case BUILT_IN_STRPBRK:
9277 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9279 case BUILT_IN_EXPECT:
9280 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9282 case BUILT_IN_ISGREATER:
9283 return fold_builtin_unordered_cmp (loc, fndecl,
9284 arg0, arg1, UNLE_EXPR, LE_EXPR);
9285 case BUILT_IN_ISGREATEREQUAL:
9286 return fold_builtin_unordered_cmp (loc, fndecl,
9287 arg0, arg1, UNLT_EXPR, LT_EXPR);
9288 case BUILT_IN_ISLESS:
9289 return fold_builtin_unordered_cmp (loc, fndecl,
9290 arg0, arg1, UNGE_EXPR, GE_EXPR);
9291 case BUILT_IN_ISLESSEQUAL:
9292 return fold_builtin_unordered_cmp (loc, fndecl,
9293 arg0, arg1, UNGT_EXPR, GT_EXPR);
9294 case BUILT_IN_ISLESSGREATER:
9295 return fold_builtin_unordered_cmp (loc, fndecl,
9296 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9297 case BUILT_IN_ISUNORDERED:
9298 return fold_builtin_unordered_cmp (loc, fndecl,
9299 arg0, arg1, UNORDERED_EXPR,
9300 NOP_EXPR);
9302 /* We do the folding for va_start in the expander. */
9303 case BUILT_IN_VA_START:
9304 break;
9306 case BUILT_IN_OBJECT_SIZE:
9307 return fold_builtin_object_size (arg0, arg1);
9309 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9310 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9312 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9313 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9315 default:
9316 break;
9318 return NULL_TREE;
9321 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9322 and ARG2.
9323 This function returns NULL_TREE if no simplification was possible. */
9325 static tree
9326 fold_builtin_3 (location_t loc, tree fndecl,
9327 tree arg0, tree arg1, tree arg2)
9329 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9330 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9332 if (TREE_CODE (arg0) == ERROR_MARK
9333 || TREE_CODE (arg1) == ERROR_MARK
9334 || TREE_CODE (arg2) == ERROR_MARK)
9335 return NULL_TREE;
9337 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9338 arg0, arg1, arg2))
9339 return ret;
9341 switch (fcode)
9344 CASE_FLT_FN (BUILT_IN_SINCOS):
9345 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9347 CASE_FLT_FN (BUILT_IN_REMQUO):
9348 if (validate_arg (arg0, REAL_TYPE)
9349 && validate_arg (arg1, REAL_TYPE)
9350 && validate_arg (arg2, POINTER_TYPE))
9351 return do_mpfr_remquo (arg0, arg1, arg2);
9352 break;
9354 case BUILT_IN_MEMCMP:
9355 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9357 case BUILT_IN_EXPECT:
9358 return fold_builtin_expect (loc, arg0, arg1, arg2);
9360 case BUILT_IN_ADD_OVERFLOW:
9361 case BUILT_IN_SUB_OVERFLOW:
9362 case BUILT_IN_MUL_OVERFLOW:
9363 case BUILT_IN_ADD_OVERFLOW_P:
9364 case BUILT_IN_SUB_OVERFLOW_P:
9365 case BUILT_IN_MUL_OVERFLOW_P:
9366 case BUILT_IN_SADD_OVERFLOW:
9367 case BUILT_IN_SADDL_OVERFLOW:
9368 case BUILT_IN_SADDLL_OVERFLOW:
9369 case BUILT_IN_SSUB_OVERFLOW:
9370 case BUILT_IN_SSUBL_OVERFLOW:
9371 case BUILT_IN_SSUBLL_OVERFLOW:
9372 case BUILT_IN_SMUL_OVERFLOW:
9373 case BUILT_IN_SMULL_OVERFLOW:
9374 case BUILT_IN_SMULLL_OVERFLOW:
9375 case BUILT_IN_UADD_OVERFLOW:
9376 case BUILT_IN_UADDL_OVERFLOW:
9377 case BUILT_IN_UADDLL_OVERFLOW:
9378 case BUILT_IN_USUB_OVERFLOW:
9379 case BUILT_IN_USUBL_OVERFLOW:
9380 case BUILT_IN_USUBLL_OVERFLOW:
9381 case BUILT_IN_UMUL_OVERFLOW:
9382 case BUILT_IN_UMULL_OVERFLOW:
9383 case BUILT_IN_UMULLL_OVERFLOW:
9384 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9386 default:
9387 break;
9389 return NULL_TREE;
9392 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9393 arguments. IGNORE is true if the result of the
9394 function call is ignored. This function returns NULL_TREE if no
9395 simplification was possible. */
9397 tree
9398 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9400 tree ret = NULL_TREE;
9402 switch (nargs)
9404 case 0:
9405 ret = fold_builtin_0 (loc, fndecl);
9406 break;
9407 case 1:
9408 ret = fold_builtin_1 (loc, fndecl, args[0]);
9409 break;
9410 case 2:
9411 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9412 break;
9413 case 3:
9414 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9415 break;
9416 default:
9417 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9418 break;
9420 if (ret)
9422 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9423 SET_EXPR_LOCATION (ret, loc);
9424 return ret;
9426 return NULL_TREE;
9429 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9430 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9431 of arguments in ARGS to be omitted. OLDNARGS is the number of
9432 elements in ARGS. */
9434 static tree
9435 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9436 int skip, tree fndecl, int n, va_list newargs)
9438 int nargs = oldnargs - skip + n;
9439 tree *buffer;
9441 if (n > 0)
9443 int i, j;
9445 buffer = XALLOCAVEC (tree, nargs);
9446 for (i = 0; i < n; i++)
9447 buffer[i] = va_arg (newargs, tree);
9448 for (j = skip; j < oldnargs; j++, i++)
9449 buffer[i] = args[j];
9451 else
9452 buffer = args + skip;
9454 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9457 /* Return true if FNDECL shouldn't be folded right now.
9458 If a built-in function has an inline attribute always_inline
9459 wrapper, defer folding it until after always_inline functions have
9460 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9461 might not be performed. */
9463 bool
9464 avoid_folding_inline_builtin (tree fndecl)
9466 return (DECL_DECLARED_INLINE_P (fndecl)
9467 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9468 && cfun
9469 && !cfun->always_inline_functions_inlined
9470 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
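/* Sketch of the situation this guards against; the glibc-style fortify
   wrapper below is an assumption for illustration:

     extern __inline __attribute__ ((__always_inline__, __gnu_inline__))
     char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }

   Folding a strcpy call before this wrapper is inlined would bypass the
   __strcpy_chk checking entirely.  */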
9473 /* A wrapper function for builtin folding that prevents warnings for
9474 "statement without effect" and the like, caused by removing the
9475 call node earlier than the warning is generated. */
9477 tree
9478 fold_call_expr (location_t loc, tree exp, bool ignore)
9480 tree ret = NULL_TREE;
9481 tree fndecl = get_callee_fndecl (exp);
9482 if (fndecl
9483 && TREE_CODE (fndecl) == FUNCTION_DECL
9484 && DECL_BUILT_IN (fndecl)
9485 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9486 yet. Defer folding until we see all the arguments
9487 (after inlining). */
9488 && !CALL_EXPR_VA_ARG_PACK (exp))
9490 int nargs = call_expr_nargs (exp);
9492 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9493 instead the last argument is __builtin_va_arg_pack (). Defer folding
9494 even in that case, until arguments are finalized. */
9495 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9497 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9498 if (fndecl2
9499 && TREE_CODE (fndecl2) == FUNCTION_DECL
9500 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9501 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9502 return NULL_TREE;
9505 if (avoid_folding_inline_builtin (fndecl))
9506 return NULL_TREE;
9508 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9509 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9510 CALL_EXPR_ARGP (exp), ignore);
9511 else
9513 tree *args = CALL_EXPR_ARGP (exp);
9514 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9515 if (ret)
9516 return ret;
9519 return NULL_TREE;
9522 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9523 N arguments are passed in the array ARGARRAY. Return a folded
9524 expression or NULL_TREE if no simplification was possible. */
9526 tree
9527 fold_builtin_call_array (location_t loc, tree,
9528 tree fn,
9529 int n,
9530 tree *argarray)
9532 if (TREE_CODE (fn) != ADDR_EXPR)
9533 return NULL_TREE;
9535 tree fndecl = TREE_OPERAND (fn, 0);
9536 if (TREE_CODE (fndecl) == FUNCTION_DECL
9537 && DECL_BUILT_IN (fndecl))
9539 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9540 function are not finalized yet. Defer folding until they are. */
9541 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9543 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9544 if (fndecl2
9545 && TREE_CODE (fndecl2) == FUNCTION_DECL
9546 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9547 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9548 return NULL_TREE;
9550 if (avoid_folding_inline_builtin (fndecl))
9551 return NULL_TREE;
9552 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9553 return targetm.fold_builtin (fndecl, n, argarray, false);
9554 else
9555 return fold_builtin_n (loc, fndecl, argarray, n, false);
9558 return NULL_TREE;
9561 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9562 along with N new arguments specified as the "..." parameters. SKIP
9563 is the number of arguments in EXP to be omitted. This function is used
9564 to do varargs-to-varargs transformations. */
9566 static tree
9567 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9569 va_list ap;
9570 tree t;
9572 va_start (ap, n);
9573 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9574 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9575 va_end (ap);
9577 return t;
9580 /* Validate a single argument ARG against a tree code CODE representing
9581 a type. Return true when argument is valid. */
9583 static bool
9584 validate_arg (const_tree arg, enum tree_code code)
9586 if (!arg)
9587 return false;
9588 else if (code == POINTER_TYPE)
9589 return POINTER_TYPE_P (TREE_TYPE (arg));
9590 else if (code == INTEGER_TYPE)
9591 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9592 return code == TREE_CODE (TREE_TYPE (arg));
9595 /* This function validates the types of a function call argument list
9596 against a specified list of tree_codes. If the last specifier is a 0,
9597 that represents an ellipsis, otherwise the last specifier must be a
9598 VOID_TYPE.
9600 This is the GIMPLE version of validate_arglist. Eventually we want to
9601 completely convert builtins.c to work from GIMPLEs and the tree based
9602 validate_arglist will then be removed. */
9604 bool
9605 validate_gimple_arglist (const gcall *call, ...)
9607 enum tree_code code;
9608 bool res = false;
9609 va_list ap;
9610 const_tree arg;
9611 size_t i;
9613 va_start (ap, call);
9614 i = 0;
9618 code = (enum tree_code) va_arg (ap, int);
9619 switch (code)
9621 case 0:
9622 /* This signifies an ellipsis; any further arguments are all ok. */
9623 res = true;
9624 goto end;
9625 case VOID_TYPE:
9626 /* This signifies an endlink, if no arguments remain, return
9627 true, otherwise return false. */
9628 res = (i == gimple_call_num_args (call));
9629 goto end;
9630 default:
9631 /* If no parameters remain or the parameter's code does not
9632 match the specified code, return false. Otherwise continue
9633 checking any remaining arguments. */
9634 arg = gimple_call_arg (call, i++);
9635 if (!validate_arg (arg, code))
9636 goto end;
9637 break;
9640 while (1);
9642 /* We need gotos here since we can only have one va_end in a
9643 function. */
9644 end: ;
9645 va_end (ap);
9647 return res;
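/* Usage sketch (a hypothetical check, mirroring the call convention
   described above):

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return false;

   accepts exactly one floating-point and one pointer argument; writing a
   trailing 0 instead of VOID_TYPE would allow any further arguments.  */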
9650 /* Default target-specific builtin expander that does nothing. */
9652 rtx
9653 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9654 rtx target ATTRIBUTE_UNUSED,
9655 rtx subtarget ATTRIBUTE_UNUSED,
9656 machine_mode mode ATTRIBUTE_UNUSED,
9657 int ignore ATTRIBUTE_UNUSED)
9659 return NULL_RTX;
9662 /* Returns true if EXP represents data that would potentially reside
9663 in a readonly section. */
9665 bool
9666 readonly_data_expr (tree exp)
9668 STRIP_NOPS (exp);
9670 if (TREE_CODE (exp) != ADDR_EXPR)
9671 return false;
9673 exp = get_base_address (TREE_OPERAND (exp, 0));
9674 if (!exp)
9675 return false;
9677 /* Make sure we call decl_readonly_section only for trees it
9678 can handle (since it returns true for everything it doesn't
9679 understand). */
9680 if (TREE_CODE (exp) == STRING_CST
9681 || TREE_CODE (exp) == CONSTRUCTOR
9682 || (VAR_P (exp) && TREE_STATIC (exp)))
9683 return decl_readonly_section (exp, 0);
9684 else
9685 return false;
9688 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9689 to the call, and TYPE is its return type.
9691 Return NULL_TREE if no simplification was possible, otherwise return the
9692 simplified form of the call as a tree.
9694 The simplified form may be a constant or other expression which
9695 computes the same value, but in a more efficient manner (including
9696 calls to other builtin functions).
9698 The call may contain arguments which need to be evaluated, but
9699 which are not useful to determine the result of the call. In
9700 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9701 COMPOUND_EXPR will be an argument which must be evaluated.
9702 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9703 COMPOUND_EXPR in the chain will contain the tree for the simplified
9704 form of the builtin function call. */
9706 static tree
9707 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9709 if (!validate_arg (s1, POINTER_TYPE)
9710 || !validate_arg (s2, POINTER_TYPE))
9711 return NULL_TREE;
9712 else
9714 tree fn;
9715 const char *p1, *p2;
9717 p2 = c_getstr (s2);
9718 if (p2 == NULL)
9719 return NULL_TREE;
9721 p1 = c_getstr (s1);
9722 if (p1 != NULL)
9724 const char *r = strpbrk (p1, p2);
9725 tree tem;
9727 if (r == NULL)
9728 return build_int_cst (TREE_TYPE (s1), 0);
9730 /* Return an offset into the constant string argument. */
9731 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9732 return fold_convert_loc (loc, type, tem);
9735 if (p2[0] == '\0')
9736 /* strpbrk(x, "") == NULL.
9737 Evaluate and ignore s1 in case it had side-effects. */
9738 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9740 if (p2[1] != '\0')
9741 return NULL_TREE; /* Really call strpbrk. */
9743 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9744 if (!fn)
9745 return NULL_TREE;
9747 /* New argument list transforming strpbrk(s1, s2) to
9748 strchr(s1, s2[0]). */
9749 return build_call_expr_loc (loc, fn, 2, s1,
9750 build_int_cst (integer_type_node, p2[0]));
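/* Illustrative folds performed above (the example strings are assumed):

     strpbrk ("hello", "lo")  ->  "hello" + 2       // constant offset
     strpbrk (s, "")          ->  (char *) 0        // s still evaluated
     strpbrk (s, "x")         ->  strchr (s, 'x')   */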
9754 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9755 to the call.
9757 Return NULL_TREE if no simplification was possible, otherwise return the
9758 simplified form of the call as a tree.
9760 The simplified form may be a constant or other expression which
9761 computes the same value, but in a more efficient manner (including
9762 calls to other builtin functions).
9764 The call may contain arguments which need to be evaluated, but
9765 which are not useful to determine the result of the call. In
9766 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9767 COMPOUND_EXPR will be an argument which must be evaluated.
9768 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9769 COMPOUND_EXPR in the chain will contain the tree for the simplified
9770 form of the builtin function call. */
9772 static tree
9773 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9775 if (!validate_arg (s1, POINTER_TYPE)
9776 || !validate_arg (s2, POINTER_TYPE))
9777 return NULL_TREE;
9778 else
9780 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9782 /* If either argument is "", return NULL_TREE. */
9783 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9784 /* Evaluate and ignore both arguments in case either one has
9785 side-effects. */
9786 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9787 s1, s2);
9788 return NULL_TREE;
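/* Example of the single fold above (argument names are assumed):

     strspn ("", accept)  ->  0   // both arguments still evaluated
     strspn (s, "")       ->  0

   Everything else is left for the library call or for later passes.  */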
9792 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9793 to the call.
9795 Return NULL_TREE if no simplification was possible, otherwise return the
9796 simplified form of the call as a tree.
9798 The simplified form may be a constant or other expression which
9799 computes the same value, but in a more efficient manner (including
9800 calls to other builtin functions).
9802 The call may contain arguments which need to be evaluated, but
9803 which are not useful to determine the result of the call. In
9804 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9805 COMPOUND_EXPR will be an argument which must be evaluated.
9806 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9807 COMPOUND_EXPR in the chain will contain the tree for the simplified
9808 form of the builtin function call. */
9810 static tree
9811 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9813 if (!validate_arg (s1, POINTER_TYPE)
9814 || !validate_arg (s2, POINTER_TYPE))
9815 return NULL_TREE;
9816 else
9818 /* If the first argument is "", return NULL_TREE. */
9819 const char *p1 = c_getstr (s1);
9820 if (p1 && *p1 == '\0')
9822 /* Evaluate and ignore argument s2 in case it has
9823 side-effects. */
9824 return omit_one_operand_loc (loc, size_type_node,
9825 size_zero_node, s2);
9828 /* If the second argument is "", return __builtin_strlen(s1). */
9829 const char *p2 = c_getstr (s2);
9830 if (p2 && *p2 == '\0')
9832 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9834 /* If the replacement _DECL isn't initialized, don't do the
9835 transformation. */
9836 if (!fn)
9837 return NULL_TREE;
9839 return build_call_expr_loc (loc, fn, 1, s1);
9841 return NULL_TREE;
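/* Examples of the two folds above (argument names are assumed):

     strcspn ("", reject)  ->  0            // reject still evaluated
     strcspn (s, "")       ->  strlen (s)   */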
9845 /* Fold the next_arg or va_start call EXP. Returns true if an error
9846 was produced, false otherwise. This is done so that we don't output
9847 the error or warning twice or three times. */
9849 bool
9850 fold_builtin_next_arg (tree exp, bool va_start_p)
9852 tree fntype = TREE_TYPE (current_function_decl);
9853 int nargs = call_expr_nargs (exp);
9854 tree arg;
9855 /* There is a good chance the current input_location points inside the
9856 definition of the va_start macro (perhaps on the token for
9857 builtin) in a system header, so warnings will not be emitted.
9858 Use the location in real source code. */
9859 source_location current_location =
9860 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9861 NULL);
9863 if (!stdarg_p (fntype))
9865 error ("%<va_start%> used in function with fixed args");
9866 return true;
9869 if (va_start_p)
9871 if (va_start_p && (nargs != 2))
9873 error ("wrong number of arguments to function %<va_start%>");
9874 return true;
9876 arg = CALL_EXPR_ARG (exp, 1);
9878 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9879 when we checked the arguments and if needed issued a warning. */
9880 else
9882 if (nargs == 0)
9884 /* Evidently an out-of-date version of <stdarg.h>; can't validate
9885 va_start's second argument, but can still work as intended. */
9886 warning_at (current_location,
9887 OPT_Wvarargs,
9888 "%<__builtin_next_arg%> called without an argument");
9889 return true;
9891 else if (nargs > 1)
9893 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9894 return true;
9896 arg = CALL_EXPR_ARG (exp, 0);
9899 if (TREE_CODE (arg) == SSA_NAME)
9900 arg = SSA_NAME_VAR (arg);
9902 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9903 or __builtin_next_arg (0) the first time we see it, after checking
9904 the arguments and if needed issuing a warning. */
9905 if (!integer_zerop (arg))
9907 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9909 /* Strip off all nops for the sake of the comparison. This
9910 is not quite the same as STRIP_NOPS. It does more.
9911 We must also strip off INDIRECT_EXPR for C++ reference
9912 parameters. */
9913 while (CONVERT_EXPR_P (arg)
9914 || TREE_CODE (arg) == INDIRECT_REF)
9915 arg = TREE_OPERAND (arg, 0);
9916 if (arg != last_parm)
9918 /* FIXME: Sometimes the tree optimizers give us something other
9919 than the last argument even though the user used the last
9920 argument. We just warn and treat the arg as the last
9921 argument, which may result in wrong code because of
9922 it. */
9923 warning_at (current_location,
9924 OPT_Wvarargs,
9925 "second parameter of %<va_start%> not last named argument");
9928 /* Undefined by C99 7.15.1.4p4 (va_start):
9929 "If the parameter parmN is declared with the register storage
9930 class, with a function or array type, or with a type that is
9931 not compatible with the type that results after application of
9932 the default argument promotions, the behavior is undefined."
9933 */
9934 else if (DECL_REGISTER (arg))
9936 warning_at (current_location,
9937 OPT_Wvarargs,
9938 "undefined behavior when second parameter of "
9939 "%<va_start%> is declared with %<register%> storage");
9942 /* We want to verify the second parameter just once before the tree
9943 optimizers are run and then avoid keeping it in the tree,
9944 as otherwise we could warn even for correct code like:
9945 void foo (int i, ...)
9946 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9947 if (va_start_p)
9948 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9949 else
9950 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9952 return false;
9956 /* Expand a call EXP to __builtin_object_size. */
9958 static rtx
9959 expand_builtin_object_size (tree exp)
9961 tree ost;
9962 int object_size_type;
9963 tree fndecl = get_callee_fndecl (exp);
9965 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9967 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9968 exp, fndecl);
9969 expand_builtin_trap ();
9970 return const0_rtx;
9973 ost = CALL_EXPR_ARG (exp, 1);
9974 STRIP_NOPS (ost);
9976 if (TREE_CODE (ost) != INTEGER_CST
9977 || tree_int_cst_sgn (ost) < 0
9978 || compare_tree_int (ost, 3) > 0)
9980 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9981 exp, fndecl);
9982 expand_builtin_trap ();
9983 return const0_rtx;
9986 object_size_type = tree_to_shwi (ost);
9988 return object_size_type < 2 ? constm1_rtx : const0_rtx;
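/* Worked example of the fallback above (assuming PTR's object could not
   be determined at compile time, so the call survived folding):

     __builtin_object_size (p, 0)  ->  (size_t) -1   // maximum of unknown
     __builtin_object_size (p, 2)  ->  (size_t) 0    // minimum of unknown  */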
9991 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9992 FCODE is the BUILT_IN_* to use.
9993 Return NULL_RTX if we failed; the caller should emit a normal call,
9994 otherwise try to get the result in TARGET, if convenient (and in
9995 mode MODE if that's convenient). */
9997 static rtx
9998 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9999 enum built_in_function fcode)
10001 if (!validate_arglist (exp,
10002 POINTER_TYPE,
10003 fcode == BUILT_IN_MEMSET_CHK
10004 ? INTEGER_TYPE : POINTER_TYPE,
10005 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10006 return NULL_RTX;
10008 tree dest = CALL_EXPR_ARG (exp, 0);
10009 tree src = CALL_EXPR_ARG (exp, 1);
10010 tree len = CALL_EXPR_ARG (exp, 2);
10011 tree size = CALL_EXPR_ARG (exp, 3);
10013 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10014 /*str=*/NULL_TREE, size);
10016 if (!tree_fits_uhwi_p (size))
10017 return NULL_RTX;
10019 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10021 /* Avoid transforming the checking call to an ordinary one when
10022 an overflow has been detected or when the call couldn't be
10023 validated because the size is not constant. */
10024 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10025 return NULL_RTX;
10027 tree fn = NULL_TREE;
10028 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10029 mem{cpy,pcpy,move,set} is available. */
10030 switch (fcode)
10032 case BUILT_IN_MEMCPY_CHK:
10033 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10034 break;
10035 case BUILT_IN_MEMPCPY_CHK:
10036 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10037 break;
10038 case BUILT_IN_MEMMOVE_CHK:
10039 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10040 break;
10041 case BUILT_IN_MEMSET_CHK:
10042 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10043 break;
10044 default:
10045 break;
10048 if (! fn)
10049 return NULL_RTX;
10051 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10052 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10053 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10054 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10056 else if (fcode == BUILT_IN_MEMSET_CHK)
10057 return NULL_RTX;
10058 else
10060 unsigned int dest_align = get_pointer_alignment (dest);
10062 /* If DEST is not a pointer type, call the normal function. */
10063 if (dest_align == 0)
10064 return NULL_RTX;
10066 /* If SRC and DEST are the same (and not volatile), do nothing. */
10067 if (operand_equal_p (src, dest, 0))
10069 tree expr;
10071 if (fcode != BUILT_IN_MEMPCPY_CHK)
10073 /* Evaluate and ignore LEN in case it has side-effects. */
10074 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10075 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10078 expr = fold_build_pointer_plus (dest, len);
10079 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10082 /* __memmove_chk special case. */
10083 if (fcode == BUILT_IN_MEMMOVE_CHK)
10085 unsigned int src_align = get_pointer_alignment (src);
10087 if (src_align == 0)
10088 return NULL_RTX;
10090 /* If src is categorized for a readonly section we can use
10091 normal __memcpy_chk. */
10092 if (readonly_data_expr (src))
10094 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10095 if (!fn)
10096 return NULL_RTX;
10097 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10098 dest, src, len, size);
10099 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10100 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10101 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10104 return NULL_RTX;
10108 /* Emit a warning if a buffer overflow is detected at compile time. */
10110 static void
10111 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10113 /* The source string. */
10114 tree srcstr = NULL_TREE;
10115 /* The size of the destination object. */
10116 tree objsize = NULL_TREE;
10117 /* The string that is being concatenated with (as in __strcat_chk)
10118 or null if it isn't. */
10119 tree catstr = NULL_TREE;
10120 /* The maximum length of the source sequence in a bounded operation
10121 (such as __strncat_chk) or null if the operation isn't bounded
10122 (such as __strcat_chk). */
10123 tree maxread = NULL_TREE;
10124 /* The exact size of the access (such as in __strncpy_chk). */
10125 tree size = NULL_TREE;
10127 switch (fcode)
10129 case BUILT_IN_STRCPY_CHK:
10130 case BUILT_IN_STPCPY_CHK:
10131 srcstr = CALL_EXPR_ARG (exp, 1);
10132 objsize = CALL_EXPR_ARG (exp, 2);
10133 break;
10135 case BUILT_IN_STRCAT_CHK:
10136 /* For __strcat_chk the warning will be emitted only if overflowing
10137 by at least strlen (dest) + 1 bytes. */
10138 catstr = CALL_EXPR_ARG (exp, 0);
10139 srcstr = CALL_EXPR_ARG (exp, 1);
10140 objsize = CALL_EXPR_ARG (exp, 2);
10141 break;
10143 case BUILT_IN_STRNCAT_CHK:
10144 catstr = CALL_EXPR_ARG (exp, 0);
10145 srcstr = CALL_EXPR_ARG (exp, 1);
10146 maxread = CALL_EXPR_ARG (exp, 2);
10147 objsize = CALL_EXPR_ARG (exp, 3);
10148 break;
10150 case BUILT_IN_STRNCPY_CHK:
10151 case BUILT_IN_STPNCPY_CHK:
10152 srcstr = CALL_EXPR_ARG (exp, 1);
10153 size = CALL_EXPR_ARG (exp, 2);
10154 objsize = CALL_EXPR_ARG (exp, 3);
10155 break;
10157 case BUILT_IN_SNPRINTF_CHK:
10158 case BUILT_IN_VSNPRINTF_CHK:
10159 maxread = CALL_EXPR_ARG (exp, 1);
10160 objsize = CALL_EXPR_ARG (exp, 3);
10161 break;
10162 default:
10163 gcc_unreachable ();
10166 if (catstr && maxread)
10168 /* Check __strncat_chk. There is no way to determine the length
10169 of the string to which the source string is being appended so
10170 just warn when the length of the source string is not known. */
10171 check_strncat_sizes (exp, objsize);
10172 return;
10175 /* The destination argument is the first one for all built-ins above. */
10176 tree dst = CALL_EXPR_ARG (exp, 0);
10178 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10181 /* Emit a warning if a buffer overflow is detected at compile time
10182 in __sprintf_chk/__vsprintf_chk calls. */
10184 static void
10185 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10187 tree size, len, fmt;
10188 const char *fmt_str;
10189 int nargs = call_expr_nargs (exp);
10191 /* Verify the required arguments in the original call. */
10193 if (nargs < 4)
10194 return;
10195 size = CALL_EXPR_ARG (exp, 2);
10196 fmt = CALL_EXPR_ARG (exp, 3);
10198 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10199 return;
10201 /* Check whether the format is a literal string constant. */
10202 fmt_str = c_getstr (fmt);
10203 if (fmt_str == NULL)
10204 return;
10206 if (!init_target_chars ())
10207 return;
10209 /* If the format doesn't contain % args or %%, we know its size. */
10210 if (strchr (fmt_str, target_percent) == 0)
10211 len = build_int_cstu (size_type_node, strlen (fmt_str));
10212 /* If the format is "%s" and first ... argument is a string literal,
10213 we know it too. */
10214 else if (fcode == BUILT_IN_SPRINTF_CHK
10215 && strcmp (fmt_str, target_percent_s) == 0)
10217 tree arg;
10219 if (nargs < 5)
10220 return;
10221 arg = CALL_EXPR_ARG (exp, 4);
10222 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10223 return;
10225 len = c_strlen (arg, 1);
10226 if (!len || ! tree_fits_uhwi_p (len))
10227 return;
10229 else
10230 return;
10232 /* Add one for the terminating nul. */
10233 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10235 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10236 /*maxread=*/NULL_TREE, len, size);
10239 /* Emit a warning if free is called with the address of a variable. */
10241 static void
10242 maybe_emit_free_warning (tree exp)
10244 tree arg = CALL_EXPR_ARG (exp, 0);
10246 STRIP_NOPS (arg);
10247 if (TREE_CODE (arg) != ADDR_EXPR)
10248 return;
10250 arg = get_base_address (TREE_OPERAND (arg, 0));
10251 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10252 return;
10254 if (SSA_VAR_P (arg))
10255 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10256 "%Kattempt to free a non-heap object %qD", exp, arg);
10257 else
10258 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10259 "%Kattempt to free a non-heap object", exp);
10262 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10263 if possible. */
10265 static tree
10266 fold_builtin_object_size (tree ptr, tree ost)
10268 unsigned HOST_WIDE_INT bytes;
10269 int object_size_type;
10271 if (!validate_arg (ptr, POINTER_TYPE)
10272 || !validate_arg (ost, INTEGER_TYPE))
10273 return NULL_TREE;
10275 STRIP_NOPS (ost);
10277 if (TREE_CODE (ost) != INTEGER_CST
10278 || tree_int_cst_sgn (ost) < 0
10279 || compare_tree_int (ost, 3) > 0)
10280 return NULL_TREE;
10282 object_size_type = tree_to_shwi (ost);
10284 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10285 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10286 and (size_t) 0 for types 2 and 3. */
10287 if (TREE_SIDE_EFFECTS (ptr))
10288 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10290 if (TREE_CODE (ptr) == ADDR_EXPR)
10292 compute_builtin_object_size (ptr, object_size_type, &bytes);
10293 if (wi::fits_to_tree_p (bytes, size_type_node))
10294 return build_int_cstu (size_type_node, bytes);
10296 else if (TREE_CODE (ptr) == SSA_NAME)
10298 /* If object size is not known yet, delay folding until
10299 later. Maybe subsequent passes will help determining
10300 it. */
10301 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10302 && wi::fits_to_tree_p (bytes, size_type_node))
10303 return build_int_cstu (size_type_node, bytes);
10306 return NULL_TREE;
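/* Worked example of the ADDR_EXPR case (the buffer is an assumption):

     char buf[64];
     __builtin_object_size (&buf[16], 0)  ->  48
     __builtin_object_size (buf, 3)       ->  64

   SSA_NAME pointers whose size is still unknown are left unfolded so
   that the object-size pass can retry later.  */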
10309 /* Builtins with folding operations that operate on "..." arguments
10310 need special handling; we need to store the arguments in a convenient
10311 data structure before attempting any folding. Fortunately there are
10312 only a few builtins that fall into this category. FNDECL is the
10313 function, EXP is the CALL_EXPR for the call. */
10315 static tree
10316 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10318 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10319 tree ret = NULL_TREE;
10321 switch (fcode)
10323 case BUILT_IN_FPCLASSIFY:
10324 ret = fold_builtin_fpclassify (loc, args, nargs);
10325 break;
10327 default:
10328 break;
10330 if (ret)
10332 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10333 SET_EXPR_LOCATION (ret, loc);
10334 TREE_NO_WARNING (ret) = 1;
10335 return ret;
10337 return NULL_TREE;
10340 /* Initialize format string characters in the target charset. */
10342 bool
10343 init_target_chars (void)
10345 static bool init;
10346 if (!init)
10348 target_newline = lang_hooks.to_target_charset ('\n');
10349 target_percent = lang_hooks.to_target_charset ('%');
10350 target_c = lang_hooks.to_target_charset ('c');
10351 target_s = lang_hooks.to_target_charset ('s');
10352 if (target_newline == 0 || target_percent == 0 || target_c == 0
10353 || target_s == 0)
10354 return false;
10356 target_percent_c[0] = target_percent;
10357 target_percent_c[1] = target_c;
10358 target_percent_c[2] = '\0';
10360 target_percent_s[0] = target_percent;
10361 target_percent_s[1] = target_s;
10362 target_percent_s[2] = '\0';
10364 target_percent_s_newline[0] = target_percent;
10365 target_percent_s_newline[1] = target_s;
10366 target_percent_s_newline[2] = target_newline;
10367 target_percent_s_newline[3] = '\0';
10369 init = true;
10371 return true;
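/* Usage sketch: after a successful init_target_chars (),

     strcmp (fmt_str, target_percent_s) == 0

   tests whether a format string is exactly "%s" in the target character
   set, which need not match the host's.  */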
10374 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10375 and no overflow/underflow occurred. INEXACT is true if M was not
10376 exactly calculated. TYPE is the tree type for the result. This
10377 function assumes that the caller cleared the MPFR flags before
10378 calculating M, so that any flag set by the calculation can be
10379 detected on entry here. Return NULL_TREE if any checks fail. */
10381 static tree
10382 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10384 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10385 overflow/underflow occurred. If -frounding-math, proceed iff the
10386 result of calling FUNC was exact. */
10387 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10388 && (!flag_rounding_math || !inexact))
10390 REAL_VALUE_TYPE rr;
10392 real_from_mpfr (&rr, m, type, GMP_RNDN);
10393 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10394 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10395 but the mpfr_t is not, then we underflowed in the
10396 conversion. */
10397 if (real_isfinite (&rr)
10398 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10400 REAL_VALUE_TYPE rmode;
10402 real_convert (&rmode, TYPE_MODE (type), &rr);
10403 /* Proceed iff the specified mode can hold the value. */
10404 if (real_identical (&rmode, &rr))
10405 return build_real (type, rmode);
10408 return NULL_TREE;
10411 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10412 number and no overflow/underflow occurred. INEXACT is true if M
10413 was not exactly calculated. TYPE is the tree type for the result.
10414 This function assumes that the caller cleared the MPFR flags before
10415 calculating M, so that any flag set by the calculation can be
10416 detected on entry here. Return NULL_TREE if any checks fail;
10417 if FORCE_CONVERT is true, bypass the checks. */
10419 static tree
10420 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10422 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10423 overflow/underflow occurred. If -frounding-math, proceed iff the
10424 result of calling FUNC was exact. */
10425 if (force_convert
10426 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10427 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10428 && (!flag_rounding_math || !inexact)))
10430 REAL_VALUE_TYPE re, im;
10432 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10433 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10434 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10435 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10436 but the mpfr_t is not, then we underflowed in the
10437 conversion. */
10438 if (force_convert
10439 || (real_isfinite (&re) && real_isfinite (&im)
10440 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10441 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10443 REAL_VALUE_TYPE re_mode, im_mode;
10445 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10446 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10447 /* Proceed iff the specified mode can hold the value. */
10448 if (force_convert
10449 || (real_identical (&re_mode, &re)
10450 && real_identical (&im_mode, &im)))
10451 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10452 build_real (TREE_TYPE (type), im_mode));
10455 return NULL_TREE;
10458 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10459 the pointer *(ARG_QUO) and return the result. The type is taken
10460 from the type of ARG0 and is used for setting the precision of the
10461 calculation and results. */
10463 static tree
10464 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10466 tree const type = TREE_TYPE (arg0);
10467 tree result = NULL_TREE;
10469 STRIP_NOPS (arg0);
10470 STRIP_NOPS (arg1);
10472 /* To proceed, MPFR must exactly represent the target floating point
10473 format, which only happens when the target base equals two. */
10474 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10475 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10476 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10478 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10479 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10481 if (real_isfinite (ra0) && real_isfinite (ra1))
10483 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10484 const int prec = fmt->p;
10485 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10486 tree result_rem;
10487 long integer_quo;
10488 mpfr_t m0, m1;
10490 mpfr_inits2 (prec, m0, m1, NULL);
10491 mpfr_from_real (m0, ra0, GMP_RNDN);
10492 mpfr_from_real (m1, ra1, GMP_RNDN);
10493 mpfr_clear_flags ();
10494 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10495 /* Remquo is independent of the rounding mode, so pass
10496 inexact=0 to do_mpfr_ckconv(). */
10497 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10498 mpfr_clears (m0, m1, NULL);
10499 if (result_rem)
10501 /* MPFR calculates quo in the host's long so it may
10502 return more bits in quo than the target int can hold
10503 if sizeof(host long) > sizeof(target int). This can
10504 happen even for native compilers in LP64 mode. In
10505 these cases, reduce the quo value modulo the largest
10506 number that the target int can hold, leaving one
10507 bit for the sign. */
10508 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10509 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10511 /* Dereference the quo pointer argument. */
10512 arg_quo = build_fold_indirect_ref (arg_quo);
10513 /* Proceed iff a valid pointer type was passed in. */
10514 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10516 /* Set the value. */
10517 tree result_quo
10518 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10519 build_int_cst (TREE_TYPE (arg_quo),
10520 integer_quo));
10521 TREE_SIDE_EFFECTS (result_quo) = 1;
10522 /* Combine the quo assignment with the rem. */
10523 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10524 result_quo, result_rem));
10529 return result;
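/* Illustrative constant fold (the values below are assumptions):

     int q;
     double r = remquo (7.0, 2.0, &q);

   folds to the compound assignment q = 4, r = -1.0: the quotient is
   rounded to nearest, ties to even (7/2 = 3.5 rounds to 4), as for
   remainder ().  */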
10532 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10533 resulting value as a tree with type TYPE. The mpfr precision is
10534 set to the precision of TYPE. We assume that this mpfr function
10535 returns zero if the result could be calculated exactly within the
10536 requested precision. In addition, the integer pointer represented
10537 by ARG_SG will be dereferenced and set to the appropriate signgam
10538 (-1,1) value. */
10540 static tree
10541 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10543 tree result = NULL_TREE;
10545 STRIP_NOPS (arg);
10547 /* To proceed, MPFR must exactly represent the target floating point
10548 format, which only happens when the target base equals two. Also
10549 verify ARG is a constant and that ARG_SG is an int pointer. */
10550 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10551 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10552 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10553 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10555 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10557 /* In addition to NaN and Inf, the argument cannot be zero or a
10558 negative integer. */
10559 if (real_isfinite (ra)
10560 && ra->cl != rvc_zero
10561 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10563 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10564 const int prec = fmt->p;
10565 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10566 int inexact, sg;
10567 mpfr_t m;
10568 tree result_lg;
10570 mpfr_init2 (m, prec);
10571 mpfr_from_real (m, ra, GMP_RNDN);
10572 mpfr_clear_flags ();
10573 inexact = mpfr_lgamma (m, &sg, m, rnd);
10574 result_lg = do_mpfr_ckconv (m, type, inexact);
10575 mpfr_clear (m);
10576 if (result_lg)
10578 tree result_sg;
10580 /* Dereference the arg_sg pointer argument. */
10581 arg_sg = build_fold_indirect_ref (arg_sg);
10582 /* Assign the signgam value into *arg_sg. */
10583 result_sg = fold_build2 (MODIFY_EXPR,
10584 TREE_TYPE (arg_sg), arg_sg,
10585 build_int_cst (TREE_TYPE (arg_sg), sg));
10586 TREE_SIDE_EFFECTS (result_sg) = 1;
10587 /* Combine the signgam assignment with the lgamma result. */
10588 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10589 result_sg, result_lg));
10594 return result;
10597 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10598 mpc function FUNC on it and return the resulting value as a tree
10599 with type TYPE. The mpfr precision is set to the precision of
10600 TYPE. We assume that function FUNC returns zero if the result
10601 could be calculated exactly within the requested precision. If
10602 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10603 in the arguments and/or results. */
10605 tree
10606 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10607 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10609 tree result = NULL_TREE;
10611 STRIP_NOPS (arg0);
10612 STRIP_NOPS (arg1);
10614 /* To proceed, MPFR must exactly represent the target floating point
10615 format, which only happens when the target base equals two. */
10616 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10617 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10618 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10619 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10620 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10622 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10623 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10624 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10625 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10627 if (do_nonfinite
10628 || (real_isfinite (re0) && real_isfinite (im0)
10629 && real_isfinite (re1) && real_isfinite (im1)))
10631 const struct real_format *const fmt =
10632 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10633 const int prec = fmt->p;
10634 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10635 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10636 int inexact;
10637 mpc_t m0, m1;
10639 mpc_init2 (m0, prec);
10640 mpc_init2 (m1, prec);
10641 mpfr_from_real (mpc_realref (m0), re0, rnd);
10642 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10643 mpfr_from_real (mpc_realref (m1), re1, rnd);
10644 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10645 mpfr_clear_flags ();
10646 inexact = func (m0, m0, m1, crnd);
10647 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10648 mpc_clear (m0);
10649 mpc_clear (m1);
10653 return result;

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
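
/* Usage sketch, kept under "#if 0" (the helper name is hypothetical):
   a caller holding a gcall * asks for the folded replacement; passing
   IGNORE as true when the call's value is unused lets the folder drop
   the result entirely.  Substituting the returned tree for the call is
   the caller's responsibility.  */
#if 0
static tree
try_fold_builtin_call_sketch (gcall *stmt)
{
  bool ignore = (gimple_call_lhs (stmt) == NULL_TREE);
  return fold_call_stmt (stmt, ignore);
}
#endif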

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
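
/* For example, a user declaration such as

     extern int ffs (int) __asm__ ("my_ffs");

   makes library calls emitted for __builtin_ffs use the assembler name
   "my_ffs", and on targets where int is narrower than a word the ffs
   optab libfunc is redirected to it as well.  */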

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
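
/* For instance, __builtin_constant_p (x) folds to the constant 0 or 1,
   and __builtin_return_address (0) becomes a single register read or
   stack load, so both satisfy this predicate.  */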

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
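
/* Size-estimation sketch, kept under "#if 0" (the helper and its
   weights are invented for illustration): heuristics such as inlining
   or unrolling cost models can combine the two predicates along these
   lines.  */
#if 0
static int
builtin_call_size_sketch (tree fndecl)
{
  if (is_simple_builtin (fndecl))
    return 0;	/* Expands to a constant or a single move.  */
  if (is_inexpensive_builtin (fndecl))
    return 1;	/* Expands inline to a few cheap insns.  */
  return 5;	/* Assume a full call sequence.  */
}
#endif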

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
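
/* Usage sketch, kept under "#if 0" (the helper is hypothetical):
   string-builtin folders use this predicate to obtain a host char for
   a constant search value, e.g. when folding strchr against a known
   string.  */
#if 0
static const char *
fold_strchr_sketch (const char *str, tree c_arg)
{
  char c;
  if (!target_char_cst_p (c_arg, &c))
    return NULL;	/* Not a representable constant; give up.  */
  return strchr (str, c);
}
#endif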

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
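
/* For example, on an LP64 target ptrdiff_type_node is a 64-bit signed
   type, so this returns PTRDIFF_MAX (2^63 - 1); the string and memory
   builtins can then diagnose size arguments exceeding that limit.  */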