/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN
/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
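/* For example, with the prefix checks above:

     is_builtin_name ("__builtin_memcpy")  -> true
     is_builtin_name ("__atomic_load_8")   -> true
     is_builtin_name ("memcpy")            -> false  */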
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
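/* A worked example of the M/N contract (illustrative): for an access
   whose byte address is known to equal 16*k + 4 for some runtime k,
   the function stores M = 128 bits in *ALIGNP and N = 32 bits in
   *BITPOSP, i.e. M divides (address-in-bits - N) and N < M.  */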
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
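/* For example, string_length ("ab\0cd", 1, 5) returns 2; and for
   2-byte elements the byte sequence "a\0" "b\0" "\0\0" gives
   string_length (buf, 2, 3) == 2, since the third element compares
   equal to an all-zero element via memcmp.  */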
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
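/* For instance, for the constant expression strlen ("hello" + 2), SRC
   resolves to the STRING_CST "hello" with BYTEOFF 2 and the result is
   ssize_int (3).  With a variable offset into a string that has no
   embedded nul, e.g. strlen ("hello" + i), the size_diffop_loc path
   above yields 5 - i instead.  */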
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
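/* E.g. on a little-endian target c_readstr ("abcd", SImode) produces
   the constant 0x64636261, and 0x61626364 on a big-endian target.
   Note that CH doubles as a sticky nul flag: once a '\0' byte has been
   read, every remaining byte of the constant stays zero.  */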
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
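/* Thus __builtin_return_address (0) arrives here with COUNT 0 and
   FNDECL_CODE BUILT_IN_RETURN_ADDRESS, walking no frames, while
   __builtin_frame_address (2) follows the dynamic chain twice and
   returns the (possibly FRAME_ADDR_RTX-adjusted) frame address
   itself.  */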
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
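/* The resulting jmp_buf layout, in Pmode-sized words, is:

     word 0   frame pointer value (targetm.builtin_setjmp_frame_value)
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area (sa_mode)

   expand_builtin_longjmp below reads the slots back in the same
   order.  */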
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
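/* The only supported call shape is therefore __builtin_longjmp (buf, 1):
   the constant 1 matches what __builtin_setjmp returns on the nonlocal
   path, and any other second argument trips the gcc_assert above.  */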
/* Return true if more CALL_EXPR arguments remain to be visited in ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
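/* Example uses from this file: validate_arglist (exp, POINTER_TYPE,
   POINTER_TYPE, VOID_TYPE) accepts exactly two pointer arguments,
   whereas validate_arglist (exp, POINTER_TYPE, 0) accepts one pointer
   followed by arbitrary further arguments (the trailing 0 is the
   ellipsis marker).  */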
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
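/* E.g. __builtin_prefetch (&a[i], 0, 3), which spells out the documented
   defaults, expands to the target's prefetch pattern as a read
   (op1 == 0) with maximal temporal locality (op2 == 3); on targets
   without a prefetch pattern only side effects of the address
   computation survive.  */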
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
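/* The rounding above is standard alignment arithmetic: e.g. with
   SIZE == 20 and an 8-byte alignment requirement,
   CEIL (20, 8) * 8 == 24, so the next register slot starts at
   offset 24.  */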
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1580 /* Perform an untyped call and save the state required to perform an
1581 untyped return of whatever value was returned by the given function. */
1583 static rtx
1584 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1586 int size, align, regno;
1587 fixed_size_mode mode;
1588 rtx incoming_args, result, reg, dest, src;
1589 rtx_call_insn *call_insn;
1590 rtx old_stack_level = 0;
1591 rtx call_fusage = 0;
1592 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1594 arguments = convert_memory_address (Pmode, arguments);
1596 /* Create a block where the return registers can be saved. */
1597 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1599 /* Fetch the arg pointer from the ARGUMENTS block. */
1600 incoming_args = gen_reg_rtx (Pmode);
1601 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1602 if (!STACK_GROWS_DOWNWARD)
1603 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1604 incoming_args, 0, OPTAB_LIB_WIDEN);
1606 /* Push a new argument block and copy the arguments. Do not allow
1607 the (potential) memcpy call below to interfere with our stack
1608 manipulations. */
1609 do_pending_stack_adjust ();
1610 NO_DEFER_POP;
1612 /* Save the stack with nonlocal if available. */
1613 if (targetm.have_save_stack_nonlocal ())
1614 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1615 else
1616 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1618 /* Allocate a block of memory onto the stack and copy the memory
1619 arguments to the outgoing arguments address. We can pass TRUE
1620 as the 4th argument because we just saved the stack pointer
1621 and will restore it right after the call. */
1622 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1624 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1625 may have already set current_function_calls_alloca to true.
1626 current_function_calls_alloca won't be set if argsize is zero,
1627 so we have to guarantee need_drap is true here. */
1628 if (SUPPORTS_STACK_ALIGNMENT)
1629 crtl->need_drap = true;
1631 dest = virtual_outgoing_args_rtx;
1632 if (!STACK_GROWS_DOWNWARD)
1634 if (CONST_INT_P (argsize))
1635 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1636 else
1637 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1639 dest = gen_rtx_MEM (BLKmode, dest);
1640 set_mem_align (dest, PARM_BOUNDARY);
1641 src = gen_rtx_MEM (BLKmode, incoming_args);
1642 set_mem_align (src, PARM_BOUNDARY);
1643 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1645 /* Refer to the argument block. */
1646 apply_args_size ();
1647 arguments = gen_rtx_MEM (BLKmode, arguments);
1648 set_mem_align (arguments, PARM_BOUNDARY);
1650 /* Walk past the arg-pointer and structure value address. */
1651 size = GET_MODE_SIZE (Pmode);
1652 if (struct_value)
1653 size += GET_MODE_SIZE (Pmode);
1655 /* Restore each of the registers previously saved. Make USE insns
1656 for each of these registers for use in making the call. */
1657 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1658 if ((mode = apply_args_mode[regno]) != VOIDmode)
1660 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1661 if (size % align != 0)
1662 size = CEIL (size, align) * align;
1663 reg = gen_rtx_REG (mode, regno);
1664 emit_move_insn (reg, adjust_address (arguments, mode, size));
1665 use_reg (&call_fusage, reg);
1666 size += GET_MODE_SIZE (mode);
1669 /* Restore the structure value address unless this is passed as an
1670 "invisible" first argument. */
1671 size = GET_MODE_SIZE (Pmode);
1672 if (struct_value)
1674 rtx value = gen_reg_rtx (Pmode);
1675 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1676 emit_move_insn (struct_value, value);
1677 if (REG_P (struct_value))
1678 use_reg (&call_fusage, struct_value);
1679 size += GET_MODE_SIZE (Pmode);
1682 /* All arguments and registers used for the call are set up by now! */
1683 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1685 /* Ensure the address is valid.  A SYMBOL_REF is already valid, so nothing
1686 needs to be done, and we don't want to load it into a register as an
1687 optimization, because prepare_call_address already did so if appropriate.  */
1688 if (GET_CODE (function) != SYMBOL_REF)
1689 function = memory_address (FUNCTION_MODE, function);
1691 /* Generate the actual call instruction and save the return value. */
1692 if (targetm.have_untyped_call ())
1694 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1695 emit_call_insn (targetm.gen_untyped_call (mem, result,
1696 result_vector (1, result)));
1698 else if (targetm.have_call_value ())
1700 rtx valreg = 0;
1702 /* Locate the unique return register. It is not possible to
1703 express a call that sets more than one return register using
1704 call_value; use untyped_call for that. In fact, untyped_call
1705 only needs to save the return registers in the given block. */
1706 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1707 if ((mode = apply_result_mode[regno]) != VOIDmode)
1709 gcc_assert (!valreg); /* have_untyped_call required. */
1711 valreg = gen_rtx_REG (mode, regno);
1714 emit_insn (targetm.gen_call_value (valreg,
1715 gen_rtx_MEM (FUNCTION_MODE, function),
1716 const0_rtx, NULL_RTX, const0_rtx));
1718 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1720 else
1721 gcc_unreachable ();
1723 /* Find the CALL insn we just emitted, and attach the register usage
1724 information. */
1725 call_insn = last_call_insn ();
1726 add_function_usage_to (call_insn, call_fusage);
1728 /* Restore the stack. */
1729 if (targetm.have_save_stack_nonlocal ())
1730 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1731 else
1732 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1733 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1735 OK_DEFER_POP;
1737 /* Return the address of the result block. */
1738 result = copy_addr_to_reg (XEXP (result, 0));
1739 return convert_memory_address (ptr_mode, result);
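/* Usage sketch (illustrative only; FORWARDEE and the size 64 are
   placeholders, not part of this file):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) forwardee, args, 64);
     __builtin_return (result);

   This is the untyped call-forwarding idiom that the functions above
   and below implement.  */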
1742 /* Perform an untyped return. */
1744 static void
1745 expand_builtin_return (rtx result)
1747 int size, align, regno;
1748 fixed_size_mode mode;
1749 rtx reg;
1750 rtx_insn *call_fusage = 0;
1752 result = convert_memory_address (Pmode, result);
1754 apply_result_size ();
1755 result = gen_rtx_MEM (BLKmode, result);
1757 if (targetm.have_untyped_return ())
1759 rtx vector = result_vector (0, result);
1760 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1761 emit_barrier ();
1762 return;
1765 /* Restore the return value and note that each value is used. */
1766 size = 0;
1767 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1768 if ((mode = apply_result_mode[regno]) != VOIDmode)
1770 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1771 if (size % align != 0)
1772 size = CEIL (size, align) * align;
1773 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1774 emit_move_insn (reg, adjust_address (result, mode, size));
1776 push_to_sequence (call_fusage);
1777 emit_use (reg);
1778 call_fusage = get_insns ();
1779 end_sequence ();
1780 size += GET_MODE_SIZE (mode);
1783 /* Put the USE insns before the return. */
1784 emit_insn (call_fusage);
1786 /* Return whatever values were restored by jumping directly to the end
1787 of the function. */
1788 expand_naked_return ();
1791 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1793 static enum type_class
1794 type_to_class (tree type)
1796 switch (TREE_CODE (type))
1798 case VOID_TYPE: return void_type_class;
1799 case INTEGER_TYPE: return integer_type_class;
1800 case ENUMERAL_TYPE: return enumeral_type_class;
1801 case BOOLEAN_TYPE: return boolean_type_class;
1802 case POINTER_TYPE: return pointer_type_class;
1803 case REFERENCE_TYPE: return reference_type_class;
1804 case OFFSET_TYPE: return offset_type_class;
1805 case REAL_TYPE: return real_type_class;
1806 case COMPLEX_TYPE: return complex_type_class;
1807 case FUNCTION_TYPE: return function_type_class;
1808 case METHOD_TYPE: return method_type_class;
1809 case RECORD_TYPE: return record_type_class;
1810 case UNION_TYPE:
1811 case QUAL_UNION_TYPE: return union_type_class;
1812 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1813 ? string_type_class : array_type_class);
1814 case LANG_TYPE: return lang_type_class;
1815 default: return no_type_class;
1819 /* Expand a call EXP to __builtin_classify_type. */
1821 static rtx
1822 expand_builtin_classify_type (tree exp)
1824 if (call_expr_nargs (exp))
1825 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1826 return GEN_INT (no_type_class);
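/* For illustration: __builtin_classify_type (1.0) folds to the value of
   real_type_class and __builtin_classify_type (1) to integer_type_class;
   in C an array argument decays to a pointer, so a string literal yields
   pointer_type_class.  The numeric values come from enum type_class.  */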
1829 /* This helper macro, meant to be used in mathfn_built_in below, determines
1830 which among a set of builtin math functions is appropriate for a given type
1831 mode.  The `F' (float) and `L' (long double) variants are automatically
1832 generated from the 'double' case.  If a function supports the _Float<N>
1833 and _Float<N>X types, additional variants with 'F32', 'F64', 'F128',
1834 etc. suffixes are considered as well.  */
1835 #define CASE_MATHFN(MATHFN) \
1836 CASE_CFN_##MATHFN: \
1837 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1838 fcodel = BUILT_IN_##MATHFN##L ; break;
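/* For example, CASE_MATHFN (SIN) is expected to expand, via the generated
   CASE_CFN_SIN macro from case-cfn-macros.h, to roughly:

     case CFN_SIN:
     case CFN_BUILT_IN_SIN:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */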
1839 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1840 types. */
1841 #define CASE_MATHFN_FLOATN(MATHFN) \
1842 CASE_CFN_##MATHFN: \
1843 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1844 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1845 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1846 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1847 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1848 break;
1849 /* Similar to above, but appends _R after any F/L suffix. */
1850 #define CASE_MATHFN_REENT(MATHFN) \
1851 case CFN_BUILT_IN_##MATHFN##_R: \
1852 case CFN_BUILT_IN_##MATHFN##F_R: \
1853 case CFN_BUILT_IN_##MATHFN##L_R: \
1854 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1855 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1857 /* Return a function equivalent to FN but operating on floating-point
1858 values of type TYPE, or END_BUILTINS if no such function exists.
1859 This is purely an operation on function codes; it does not guarantee
1860 that the target actually has an implementation of the function. */
1862 static built_in_function
1863 mathfn_built_in_2 (tree type, combined_fn fn)
1865 tree mtype;
1866 built_in_function fcode, fcodef, fcodel;
1867 built_in_function fcodef16 = END_BUILTINS;
1868 built_in_function fcodef32 = END_BUILTINS;
1869 built_in_function fcodef64 = END_BUILTINS;
1870 built_in_function fcodef128 = END_BUILTINS;
1871 built_in_function fcodef32x = END_BUILTINS;
1872 built_in_function fcodef64x = END_BUILTINS;
1873 built_in_function fcodef128x = END_BUILTINS;
1875 switch (fn)
1877 CASE_MATHFN (ACOS)
1878 CASE_MATHFN (ACOSH)
1879 CASE_MATHFN (ASIN)
1880 CASE_MATHFN (ASINH)
1881 CASE_MATHFN (ATAN)
1882 CASE_MATHFN (ATAN2)
1883 CASE_MATHFN (ATANH)
1884 CASE_MATHFN (CBRT)
1885 CASE_MATHFN_FLOATN (CEIL)
1886 CASE_MATHFN (CEXPI)
1887 CASE_MATHFN_FLOATN (COPYSIGN)
1888 CASE_MATHFN (COS)
1889 CASE_MATHFN (COSH)
1890 CASE_MATHFN (DREM)
1891 CASE_MATHFN (ERF)
1892 CASE_MATHFN (ERFC)
1893 CASE_MATHFN (EXP)
1894 CASE_MATHFN (EXP10)
1895 CASE_MATHFN (EXP2)
1896 CASE_MATHFN (EXPM1)
1897 CASE_MATHFN (FABS)
1898 CASE_MATHFN (FDIM)
1899 CASE_MATHFN_FLOATN (FLOOR)
1900 CASE_MATHFN_FLOATN (FMA)
1901 CASE_MATHFN_FLOATN (FMAX)
1902 CASE_MATHFN_FLOATN (FMIN)
1903 CASE_MATHFN (FMOD)
1904 CASE_MATHFN (FREXP)
1905 CASE_MATHFN (GAMMA)
1906 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1907 CASE_MATHFN (HUGE_VAL)
1908 CASE_MATHFN (HYPOT)
1909 CASE_MATHFN (ILOGB)
1910 CASE_MATHFN (ICEIL)
1911 CASE_MATHFN (IFLOOR)
1912 CASE_MATHFN (INF)
1913 CASE_MATHFN (IRINT)
1914 CASE_MATHFN (IROUND)
1915 CASE_MATHFN (ISINF)
1916 CASE_MATHFN (J0)
1917 CASE_MATHFN (J1)
1918 CASE_MATHFN (JN)
1919 CASE_MATHFN (LCEIL)
1920 CASE_MATHFN (LDEXP)
1921 CASE_MATHFN (LFLOOR)
1922 CASE_MATHFN (LGAMMA)
1923 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1924 CASE_MATHFN (LLCEIL)
1925 CASE_MATHFN (LLFLOOR)
1926 CASE_MATHFN (LLRINT)
1927 CASE_MATHFN (LLROUND)
1928 CASE_MATHFN (LOG)
1929 CASE_MATHFN (LOG10)
1930 CASE_MATHFN (LOG1P)
1931 CASE_MATHFN (LOG2)
1932 CASE_MATHFN (LOGB)
1933 CASE_MATHFN (LRINT)
1934 CASE_MATHFN (LROUND)
1935 CASE_MATHFN (MODF)
1936 CASE_MATHFN (NAN)
1937 CASE_MATHFN (NANS)
1938 CASE_MATHFN_FLOATN (NEARBYINT)
1939 CASE_MATHFN (NEXTAFTER)
1940 CASE_MATHFN (NEXTTOWARD)
1941 CASE_MATHFN (POW)
1942 CASE_MATHFN (POWI)
1943 CASE_MATHFN (POW10)
1944 CASE_MATHFN (REMAINDER)
1945 CASE_MATHFN (REMQUO)
1946 CASE_MATHFN_FLOATN (RINT)
1947 CASE_MATHFN_FLOATN (ROUND)
1948 CASE_MATHFN (SCALB)
1949 CASE_MATHFN (SCALBLN)
1950 CASE_MATHFN (SCALBN)
1951 CASE_MATHFN (SIGNBIT)
1952 CASE_MATHFN (SIGNIFICAND)
1953 CASE_MATHFN (SIN)
1954 CASE_MATHFN (SINCOS)
1955 CASE_MATHFN (SINH)
1956 CASE_MATHFN_FLOATN (SQRT)
1957 CASE_MATHFN (TAN)
1958 CASE_MATHFN (TANH)
1959 CASE_MATHFN (TGAMMA)
1960 CASE_MATHFN_FLOATN (TRUNC)
1961 CASE_MATHFN (Y0)
1962 CASE_MATHFN (Y1)
1963 CASE_MATHFN (YN)
1965 default:
1966 return END_BUILTINS;
1969 mtype = TYPE_MAIN_VARIANT (type);
1970 if (mtype == double_type_node)
1971 return fcode;
1972 else if (mtype == float_type_node)
1973 return fcodef;
1974 else if (mtype == long_double_type_node)
1975 return fcodel;
1976 else if (mtype == float16_type_node)
1977 return fcodef16;
1978 else if (mtype == float32_type_node)
1979 return fcodef32;
1980 else if (mtype == float64_type_node)
1981 return fcodef64;
1982 else if (mtype == float128_type_node)
1983 return fcodef128;
1984 else if (mtype == float32x_type_node)
1985 return fcodef32x;
1986 else if (mtype == float64x_type_node)
1987 return fcodef64x;
1988 else if (mtype == float128x_type_node)
1989 return fcodef128x;
1990 else
1991 return END_BUILTINS;
1994 /* Return the mathematical function equivalent to FN, operating directly on TYPE,
1995 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1996 otherwise use the explicit declaration. If we can't do the conversion,
1997 return null. */
1999 static tree
2000 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2002 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2003 if (fcode2 == END_BUILTINS)
2004 return NULL_TREE;
2006 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2007 return NULL_TREE;
2009 return builtin_decl_explicit (fcode2);
2012 /* Like mathfn_built_in_1, but always use the implicit array. */
2014 tree
2015 mathfn_built_in (tree type, combined_fn fn)
2017 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2020 /* Like mathfn_built_in_1, but take a built_in_function and
2021 always use the implicit array. */
2023 tree
2024 mathfn_built_in (tree type, enum built_in_function fn)
2026 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
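/* Example: mathfn_built_in (float_type_node, BUILT_IN_SQRT) should yield
   the declaration of sqrtf (BUILT_IN_SQRTF), provided the front end has
   marked that builtin as implicitly available; otherwise NULL_TREE.  */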
2029 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2030 return its code, otherwise return IFN_LAST. Note that this function
2031 only tests whether the function is defined in internals.def, not whether
2032 it is actually available on the target. */
2034 internal_fn
2035 associated_internal_fn (tree fndecl)
2037 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2038 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2039 switch (DECL_FUNCTION_CODE (fndecl))
2041 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2042 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2043 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2044 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2045 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2046 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2047 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2048 #include "internal-fn.def"
2050 CASE_FLT_FN (BUILT_IN_POW10):
2051 return IFN_EXP10;
2053 CASE_FLT_FN (BUILT_IN_DREM):
2054 return IFN_REMAINDER;
2056 CASE_FLT_FN (BUILT_IN_SCALBN):
2057 CASE_FLT_FN (BUILT_IN_SCALBLN):
2058 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2059 return IFN_LDEXP;
2060 return IFN_LAST;
2062 default:
2063 return IFN_LAST;
2067 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2068 on the current target by a call to an internal function, return the
2069 code of that internal function, otherwise return IFN_LAST. The caller
2070 is responsible for ensuring that any side-effects of the built-in
2071 call are dealt with correctly. E.g. if CALL sets errno, the caller
2072 must decide that the errno result isn't needed or make it available
2073 in some other way. */
2075 internal_fn
2076 replacement_internal_fn (gcall *call)
2078 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2080 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2081 if (ifn != IFN_LAST)
2083 tree_pair types = direct_internal_fn_types (ifn, call);
2084 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2085 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2086 return ifn;
2089 return IFN_LAST;
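/* Sketch of the intended use: for a GIMPLE statement such as

     x = __builtin_sqrt (y);

   replacement_internal_fn returns IFN_SQRT when the target implements the
   corresponding optab for y's mode, letting callers rewrite the statement
   as the internal call  x = .SQRT (y);  (GIMPLE dump syntax).  */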
2092 /* Expand a call to the builtin trinary math functions (fma).
2093 Return NULL_RTX if a normal call should be emitted rather than expanding the
2094 function in-line. EXP is the expression that is a call to the builtin
2095 function; if convenient, the result should be placed in TARGET.
2096 SUBTARGET may be used as the target for computing one of EXP's
2097 operands. */
2099 static rtx
2100 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2102 optab builtin_optab;
2103 rtx op0, op1, op2, result;
2104 rtx_insn *insns;
2105 tree fndecl = get_callee_fndecl (exp);
2106 tree arg0, arg1, arg2;
2107 machine_mode mode;
2109 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2110 return NULL_RTX;
2112 arg0 = CALL_EXPR_ARG (exp, 0);
2113 arg1 = CALL_EXPR_ARG (exp, 1);
2114 arg2 = CALL_EXPR_ARG (exp, 2);
2116 switch (DECL_FUNCTION_CODE (fndecl))
2118 CASE_FLT_FN (BUILT_IN_FMA):
2119 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2120 builtin_optab = fma_optab; break;
2121 default:
2122 gcc_unreachable ();
2125 /* Make a suitable register to place result in. */
2126 mode = TYPE_MODE (TREE_TYPE (exp));
2128 /* Before working hard, check whether the instruction is available. */
2129 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2130 return NULL_RTX;
2132 result = gen_reg_rtx (mode);
2134 /* Always stabilize the argument list. */
2135 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2136 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2137 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2139 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2140 op1 = expand_normal (arg1);
2141 op2 = expand_normal (arg2);
2143 start_sequence ();
2145 /* Compute into RESULT.
2146 Set RESULT to wherever the result comes back. */
2147 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2148 result, 0);
2150 /* If we were unable to expand via the builtin, stop the sequence
2151 (without outputting the insns) and call to the library function
2152 with the stabilized argument list. */
2153 if (result == 0)
2155 end_sequence ();
2156 return expand_call (exp, target, target == const0_rtx);
2159 /* Output the entire sequence. */
2160 insns = get_insns ();
2161 end_sequence ();
2162 emit_insn (insns);
2164 return result;
2167 /* Expand a call to the builtin sin and cos math functions.
2168 Return NULL_RTX if a normal call should be emitted rather than expanding the
2169 function in-line. EXP is the expression that is a call to the builtin
2170 function; if convenient, the result should be placed in TARGET.
2171 SUBTARGET may be used as the target for computing one of EXP's
2172 operands. */
2174 static rtx
2175 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2177 optab builtin_optab;
2178 rtx op0;
2179 rtx_insn *insns;
2180 tree fndecl = get_callee_fndecl (exp);
2181 machine_mode mode;
2182 tree arg;
2184 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2185 return NULL_RTX;
2187 arg = CALL_EXPR_ARG (exp, 0);
2189 switch (DECL_FUNCTION_CODE (fndecl))
2191 CASE_FLT_FN (BUILT_IN_SIN):
2192 CASE_FLT_FN (BUILT_IN_COS):
2193 builtin_optab = sincos_optab; break;
2194 default:
2195 gcc_unreachable ();
2198 /* Make a suitable register to place result in. */
2199 mode = TYPE_MODE (TREE_TYPE (exp));
2201 /* Check if the sincos insn is available; otherwise fall back
2202 to the sin or cos insn.  */
2203 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2204 switch (DECL_FUNCTION_CODE (fndecl))
2206 CASE_FLT_FN (BUILT_IN_SIN):
2207 builtin_optab = sin_optab; break;
2208 CASE_FLT_FN (BUILT_IN_COS):
2209 builtin_optab = cos_optab; break;
2210 default:
2211 gcc_unreachable ();
2214 /* Before working hard, check whether the instruction is available. */
2215 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2217 rtx result = gen_reg_rtx (mode);
2219 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2220 need to expand the argument again. This way, we will not perform
2221 side-effects more than once.  */
2222 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2224 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2226 start_sequence ();
2228 /* Compute into RESULT.
2229 Set RESULT to wherever the result comes back. */
2230 if (builtin_optab == sincos_optab)
2232 int ok;
2234 switch (DECL_FUNCTION_CODE (fndecl))
2236 CASE_FLT_FN (BUILT_IN_SIN):
2237 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2238 break;
2239 CASE_FLT_FN (BUILT_IN_COS):
2240 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2241 break;
2242 default:
2243 gcc_unreachable ();
2245 gcc_assert (ok);
2247 else
2248 result = expand_unop (mode, builtin_optab, op0, result, 0);
2250 if (result != 0)
2252 /* Output the entire sequence. */
2253 insns = get_insns ();
2254 end_sequence ();
2255 emit_insn (insns);
2256 return result;
2259 /* If we were unable to expand via the builtin, stop the sequence
2260 (without outputting the insns) and call to the library function
2261 with the stabilized argument list. */
2262 end_sequence ();
2265 return expand_call (exp, target, target == const0_rtx);
2268 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2269 return an RTL instruction code that implements the functionality.
2270 If that isn't possible or available return CODE_FOR_nothing. */
2272 static enum insn_code
2273 interclass_mathfn_icode (tree arg, tree fndecl)
2275 bool errno_set = false;
2276 optab builtin_optab = unknown_optab;
2277 machine_mode mode;
2279 switch (DECL_FUNCTION_CODE (fndecl))
2281 CASE_FLT_FN (BUILT_IN_ILOGB):
2282 errno_set = true; builtin_optab = ilogb_optab; break;
2283 CASE_FLT_FN (BUILT_IN_ISINF):
2284 builtin_optab = isinf_optab; break;
2285 case BUILT_IN_ISNORMAL:
2286 case BUILT_IN_ISFINITE:
2287 CASE_FLT_FN (BUILT_IN_FINITE):
2288 case BUILT_IN_FINITED32:
2289 case BUILT_IN_FINITED64:
2290 case BUILT_IN_FINITED128:
2291 case BUILT_IN_ISINFD32:
2292 case BUILT_IN_ISINFD64:
2293 case BUILT_IN_ISINFD128:
2294 /* These builtins have no optabs (yet). */
2295 break;
2296 default:
2297 gcc_unreachable ();
2300 /* There's no easy way to detect the case we need to set EDOM. */
2301 if (flag_errno_math && errno_set)
2302 return CODE_FOR_nothing;
2304 /* Optab mode depends on the mode of the input argument. */
2305 mode = TYPE_MODE (TREE_TYPE (arg));
2307 if (builtin_optab)
2308 return optab_handler (builtin_optab, mode);
2309 return CODE_FOR_nothing;
2312 /* Expand a call to one of the builtin math functions that operate on
2313 a floating-point argument and output an integer result (ilogb, isinf,
2314 isnan, etc).
2315 Return 0 if a normal call should be emitted rather than expanding the
2316 function in-line. EXP is the expression that is a call to the builtin
2317 function; if convenient, the result should be placed in TARGET. */
2319 static rtx
2320 expand_builtin_interclass_mathfn (tree exp, rtx target)
2322 enum insn_code icode = CODE_FOR_nothing;
2323 rtx op0;
2324 tree fndecl = get_callee_fndecl (exp);
2325 machine_mode mode;
2326 tree arg;
2328 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2329 return NULL_RTX;
2331 arg = CALL_EXPR_ARG (exp, 0);
2332 icode = interclass_mathfn_icode (arg, fndecl);
2333 mode = TYPE_MODE (TREE_TYPE (arg));
2335 if (icode != CODE_FOR_nothing)
2337 struct expand_operand ops[1];
2338 rtx_insn *last = get_last_insn ();
2339 tree orig_arg = arg;
2341 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2342 need to expand the argument again. This way, we will not perform
2343 side-effects more than once.  */
2344 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2346 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2348 if (mode != GET_MODE (op0))
2349 op0 = convert_to_mode (mode, op0, 0);
2351 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2352 if (maybe_legitimize_operands (icode, 0, 1, ops)
2353 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2354 return ops[0].value;
2356 delete_insns_since (last);
2357 CALL_EXPR_ARG (exp, 0) = orig_arg;
2360 return NULL_RTX;
2363 /* Expand a call to the builtin sincos math function.
2364 Return NULL_RTX if a normal call should be emitted rather than expanding the
2365 function in-line. EXP is the expression that is a call to the builtin
2366 function. */
2368 static rtx
2369 expand_builtin_sincos (tree exp)
2371 rtx op0, op1, op2, target1, target2;
2372 machine_mode mode;
2373 tree arg, sinp, cosp;
2374 int result;
2375 location_t loc = EXPR_LOCATION (exp);
2376 tree alias_type, alias_off;
2378 if (!validate_arglist (exp, REAL_TYPE,
2379 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2380 return NULL_RTX;
2382 arg = CALL_EXPR_ARG (exp, 0);
2383 sinp = CALL_EXPR_ARG (exp, 1);
2384 cosp = CALL_EXPR_ARG (exp, 2);
2386 /* Make a suitable register to place result in. */
2387 mode = TYPE_MODE (TREE_TYPE (arg));
2389 /* Check if sincos insn is available, otherwise emit the call. */
2390 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2391 return NULL_RTX;
2393 target1 = gen_reg_rtx (mode);
2394 target2 = gen_reg_rtx (mode);
2396 op0 = expand_normal (arg);
2397 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2398 alias_off = build_int_cst (alias_type, 0);
2399 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2400 sinp, alias_off));
2401 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2402 cosp, alias_off));
2404 /* Compute into target1 and target2.
2405 Set TARGET to wherever the result comes back. */
2406 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2407 gcc_assert (result);
2409 /* Move target1 and target2 to the memory locations indicated
2410 by op1 and op2. */
2411 emit_move_insn (op1, target1);
2412 emit_move_insn (op2, target2);
2414 return const0_rtx;
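/* For example, a call sincos (x, &s, &c) whose argument mode has a sincos
   insn expands to a single two-output instruction; the results are then
   stored through the second and third pointer arguments and no library
   call is emitted.  */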
2417 /* Expand a call to the internal cexpi builtin to the sincos math function.
2418 EXP is the expression that is a call to the builtin function; if convenient,
2419 the result should be placed in TARGET. */
2421 static rtx
2422 expand_builtin_cexpi (tree exp, rtx target)
2424 tree fndecl = get_callee_fndecl (exp);
2425 tree arg, type;
2426 machine_mode mode;
2427 rtx op0, op1, op2;
2428 location_t loc = EXPR_LOCATION (exp);
2430 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2431 return NULL_RTX;
2433 arg = CALL_EXPR_ARG (exp, 0);
2434 type = TREE_TYPE (arg);
2435 mode = TYPE_MODE (TREE_TYPE (arg));
2437 /* Try expanding via a sincos optab, fall back to emitting a libcall
2438 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2439 is only generated from sincos or cexp, or when either of them is available.  */
2440 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2442 op1 = gen_reg_rtx (mode);
2443 op2 = gen_reg_rtx (mode);
2445 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2447 /* Compute into op1 and op2. */
2448 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2450 else if (targetm.libc_has_function (function_sincos))
2452 tree call, fn = NULL_TREE;
2453 tree top1, top2;
2454 rtx op1a, op2a;
2456 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2457 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2459 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2460 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2461 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2462 else
2463 gcc_unreachable ();
2465 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2466 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2467 op1a = copy_addr_to_reg (XEXP (op1, 0));
2468 op2a = copy_addr_to_reg (XEXP (op2, 0));
2469 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2470 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2472 /* Make sure not to fold the sincos call again. */
2473 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2474 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2475 call, 3, arg, top1, top2));
2477 else
2479 tree call, fn = NULL_TREE, narg;
2480 tree ctype = build_complex_type (type);
2482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2483 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2485 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2487 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2488 else
2489 gcc_unreachable ();
2491 /* If we don't have a decl for cexp create one. This is the
2492 friendliest fallback if the user calls __builtin_cexpi
2493 on a target without full C99 function support.  */
2494 if (fn == NULL_TREE)
2496 tree fntype;
2497 const char *name = NULL;
2499 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2500 name = "cexpf";
2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2502 name = "cexp";
2503 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2504 name = "cexpl";
2506 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2507 fn = build_fn_decl (name, fntype);
2510 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2511 build_real (type, dconst0), arg);
2513 /* Make sure not to fold the cexp call again. */
2514 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2515 return expand_expr (build_call_nary (ctype, call, 1, narg),
2516 target, VOIDmode, EXPAND_NORMAL);
2519 /* Now build the proper return type. */
2520 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2521 make_tree (TREE_TYPE (arg), op2),
2522 make_tree (TREE_TYPE (arg), op1)),
2523 target, VOIDmode, EXPAND_NORMAL);
2526 /* Conveniently construct a function call expression. FNDECL names the
2527 function to be called, N is the number of arguments, and the "..."
2528 parameters are the argument expressions.  Unlike build_call_expr
2529 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2531 static tree
2532 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2534 va_list ap;
2535 tree fntype = TREE_TYPE (fndecl);
2536 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2538 va_start (ap, n);
2539 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2540 va_end (ap);
2541 SET_EXPR_LOCATION (fn, loc);
2542 return fn;
2545 /* Expand a call to one of the builtin rounding functions gcc defines
2546 as an extension (lfloor and lceil). As these are gcc extensions we
2547 do not need to worry about setting errno to EDOM.
2548 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2549 EXP is the expression that is a call to the builtin function;
2550 if convenient, the result should be placed in TARGET. */
2552 static rtx
2553 expand_builtin_int_roundingfn (tree exp, rtx target)
2555 convert_optab builtin_optab;
2556 rtx op0, tmp;
2557 rtx_insn *insns;
2558 tree fndecl = get_callee_fndecl (exp);
2559 enum built_in_function fallback_fn;
2560 tree fallback_fndecl;
2561 machine_mode mode;
2562 tree arg;
2564 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2565 gcc_unreachable ();
2567 arg = CALL_EXPR_ARG (exp, 0);
2569 switch (DECL_FUNCTION_CODE (fndecl))
2571 CASE_FLT_FN (BUILT_IN_ICEIL):
2572 CASE_FLT_FN (BUILT_IN_LCEIL):
2573 CASE_FLT_FN (BUILT_IN_LLCEIL):
2574 builtin_optab = lceil_optab;
2575 fallback_fn = BUILT_IN_CEIL;
2576 break;
2578 CASE_FLT_FN (BUILT_IN_IFLOOR):
2579 CASE_FLT_FN (BUILT_IN_LFLOOR):
2580 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2581 builtin_optab = lfloor_optab;
2582 fallback_fn = BUILT_IN_FLOOR;
2583 break;
2585 default:
2586 gcc_unreachable ();
2589 /* Make a suitable register to place result in. */
2590 mode = TYPE_MODE (TREE_TYPE (exp));
2592 target = gen_reg_rtx (mode);
2594 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2595 need to expand the argument again. This way, we will not perform
2596 side-effects more than once.  */
2597 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2599 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2601 start_sequence ();
2603 /* Compute into TARGET. */
2604 if (expand_sfix_optab (target, op0, builtin_optab))
2606 /* Output the entire sequence. */
2607 insns = get_insns ();
2608 end_sequence ();
2609 emit_insn (insns);
2610 return target;
2613 /* If we were unable to expand via the builtin, stop the sequence
2614 (without outputting the insns). */
2615 end_sequence ();
2617 /* Fall back to floating point rounding optab. */
2618 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2620 /* For non-C99 targets we may end up without a fallback fndecl here
2621 if the user called __builtin_lfloor directly. In this case emit
2622 a call to the floor/ceil variants nevertheless. This should result
2623 in the best user experience for targets without full C99 support.  */
2624 if (fallback_fndecl == NULL_TREE)
2626 tree fntype;
2627 const char *name = NULL;
2629 switch (DECL_FUNCTION_CODE (fndecl))
2631 case BUILT_IN_ICEIL:
2632 case BUILT_IN_LCEIL:
2633 case BUILT_IN_LLCEIL:
2634 name = "ceil";
2635 break;
2636 case BUILT_IN_ICEILF:
2637 case BUILT_IN_LCEILF:
2638 case BUILT_IN_LLCEILF:
2639 name = "ceilf";
2640 break;
2641 case BUILT_IN_ICEILL:
2642 case BUILT_IN_LCEILL:
2643 case BUILT_IN_LLCEILL:
2644 name = "ceill";
2645 break;
2646 case BUILT_IN_IFLOOR:
2647 case BUILT_IN_LFLOOR:
2648 case BUILT_IN_LLFLOOR:
2649 name = "floor";
2650 break;
2651 case BUILT_IN_IFLOORF:
2652 case BUILT_IN_LFLOORF:
2653 case BUILT_IN_LLFLOORF:
2654 name = "floorf";
2655 break;
2656 case BUILT_IN_IFLOORL:
2657 case BUILT_IN_LFLOORL:
2658 case BUILT_IN_LLFLOORL:
2659 name = "floorl";
2660 break;
2661 default:
2662 gcc_unreachable ();
2665 fntype = build_function_type_list (TREE_TYPE (arg),
2666 TREE_TYPE (arg), NULL_TREE);
2667 fallback_fndecl = build_fn_decl (name, fntype);
2670 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2672 tmp = expand_normal (exp);
2673 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2675 /* Truncate the result of the floating-point optab to an integer
2676 via expand_fix (). */
2677 target = gen_reg_rtx (mode);
2678 expand_fix (target, tmp, 0);
2680 return target;
2683 /* Expand a call to one of the builtin math functions doing integer
2684 conversion (lrint).
2685 Return 0 if a normal call should be emitted rather than expanding the
2686 function in-line. EXP is the expression that is a call to the builtin
2687 function; if convenient, the result should be placed in TARGET. */
2689 static rtx
2690 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2692 convert_optab builtin_optab;
2693 rtx op0;
2694 rtx_insn *insns;
2695 tree fndecl = get_callee_fndecl (exp);
2696 tree arg;
2697 machine_mode mode;
2698 enum built_in_function fallback_fn = BUILT_IN_NONE;
2700 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2701 gcc_unreachable ();
2703 arg = CALL_EXPR_ARG (exp, 0);
2705 switch (DECL_FUNCTION_CODE (fndecl))
2707 CASE_FLT_FN (BUILT_IN_IRINT):
2708 fallback_fn = BUILT_IN_LRINT;
2709 gcc_fallthrough ();
2710 CASE_FLT_FN (BUILT_IN_LRINT):
2711 CASE_FLT_FN (BUILT_IN_LLRINT):
2712 builtin_optab = lrint_optab;
2713 break;
2715 CASE_FLT_FN (BUILT_IN_IROUND):
2716 fallback_fn = BUILT_IN_LROUND;
2717 gcc_fallthrough ();
2718 CASE_FLT_FN (BUILT_IN_LROUND):
2719 CASE_FLT_FN (BUILT_IN_LLROUND):
2720 builtin_optab = lround_optab;
2721 break;
2723 default:
2724 gcc_unreachable ();
2727 /* There's no easy way to detect the case we need to set EDOM. */
2728 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2729 return NULL_RTX;
2731 /* Make a suitable register to place result in. */
2732 mode = TYPE_MODE (TREE_TYPE (exp));
2734 /* There's no easy way to detect the case we need to set EDOM. */
2735 if (!flag_errno_math)
2737 rtx result = gen_reg_rtx (mode);
2739 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2740 need to expand the argument again. This way, we will not perform
2741 side-effects more than once.  */
2742 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2744 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2746 start_sequence ();
2748 if (expand_sfix_optab (result, op0, builtin_optab))
2750 /* Output the entire sequence. */
2751 insns = get_insns ();
2752 end_sequence ();
2753 emit_insn (insns);
2754 return result;
2757 /* If we were unable to expand via the builtin, stop the sequence
2758 (without outputting the insns) and call to the library function
2759 with the stabilized argument list. */
2760 end_sequence ();
2763 if (fallback_fn != BUILT_IN_NONE)
2765 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2766 targets, (int) round (x) should never be transformed into
2767 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2768 a call to lround in the hope that the target provides at least some
2769 C99 functions. This should result in the best user experience for
2770 targets without full C99 support.  */
2771 tree fallback_fndecl = mathfn_built_in_1
2772 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2774 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2775 fallback_fndecl, 1, arg);
2777 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2778 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2779 return convert_to_mode (mode, target, 0);
2782 return expand_call (exp, target, target == const0_rtx);
2785 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2786 a normal call should be emitted rather than expanding the function
2787 in-line. EXP is the expression that is a call to the builtin
2788 function; if convenient, the result should be placed in TARGET. */
2790 static rtx
2791 expand_builtin_powi (tree exp, rtx target)
2793 tree arg0, arg1;
2794 rtx op0, op1;
2795 machine_mode mode;
2796 machine_mode mode2;
2798 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2799 return NULL_RTX;
2801 arg0 = CALL_EXPR_ARG (exp, 0);
2802 arg1 = CALL_EXPR_ARG (exp, 1);
2803 mode = TYPE_MODE (TREE_TYPE (exp));
2805 /* Emit a libcall to libgcc. */
2807 /* Mode of the 2nd argument must match that of an int. */
2808 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2810 if (target == NULL_RTX)
2811 target = gen_reg_rtx (mode);
2813 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2814 if (GET_MODE (op0) != mode)
2815 op0 = convert_to_mode (mode, op0, 0);
2816 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2817 if (GET_MODE (op1) != mode2)
2818 op1 = convert_to_mode (mode2, op1, 0);
2820 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2821 target, LCT_CONST, mode,
2822 op0, mode, op1, mode2);
2824 return target;
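/* Illustration: since expansion always takes the libcall path here,
   double r = __builtin_powi (x, 5) becomes, conceptually,
   r = __powidf2 (x, 5), the libgcc routine named by
   optab_libfunc (powi_optab, DFmode), with the exponent first converted
   to int mode.  */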
2827 /* Expand expression EXP which is a call to the strlen builtin. Return
2828 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2829 try to get the result in TARGET, if convenient. */
2831 static rtx
2832 expand_builtin_strlen (tree exp, rtx target,
2833 machine_mode target_mode)
2835 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2836 return NULL_RTX;
2838 struct expand_operand ops[4];
2839 rtx pat;
2840 tree len;
2841 tree src = CALL_EXPR_ARG (exp, 0);
2842 rtx src_reg;
2843 rtx_insn *before_strlen;
2844 machine_mode insn_mode;
2845 enum insn_code icode = CODE_FOR_nothing;
2846 unsigned int align;
2848 /* If the length can be computed at compile-time, return it. */
2849 len = c_strlen (src, 0);
2850 if (len)
2851 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2853 /* If the length can be computed at compile-time and is constant
2854 integer, but there are side-effects in src, evaluate
2855 src for side-effects, then return len.
2856 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2857 can be optimized into: i++; x = 3; */
2858 len = c_strlen (src, 1);
2859 if (len && TREE_CODE (len) == INTEGER_CST)
2861 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2862 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2865 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2867 /* If SRC is not a pointer type, don't do this operation inline. */
2868 if (align == 0)
2869 return NULL_RTX;
2871 /* Bail out if we can't compute strlen in the right mode. */
2872 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2874 icode = optab_handler (strlen_optab, insn_mode);
2875 if (icode != CODE_FOR_nothing)
2876 break;
2878 if (insn_mode == VOIDmode)
2879 return NULL_RTX;
2881 /* Make a place to hold the source address. We will not expand
2882 the actual source until we are sure that the expansion will
2883 not fail -- there are trees that cannot be expanded twice. */
2884 src_reg = gen_reg_rtx (Pmode);
2886 /* Mark the beginning of the strlen sequence so we can emit the
2887 source operand later. */
2888 before_strlen = get_last_insn ();
2890 create_output_operand (&ops[0], target, insn_mode);
2891 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2892 create_integer_operand (&ops[2], 0);
2893 create_integer_operand (&ops[3], align);
2894 if (!maybe_expand_insn (icode, 4, ops))
2895 return NULL_RTX;
2897 /* Check to see if the argument was declared attribute nonstring
2898 and if so, issue a warning since at this point it's not known
2899 to be nul-terminated. */
2900 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2902 /* Now that we are assured of success, expand the source. */
2903 start_sequence ();
2904 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2905 if (pat != src_reg)
2907 #ifdef POINTERS_EXTEND_UNSIGNED
2908 if (GET_MODE (pat) != Pmode)
2909 pat = convert_to_mode (Pmode, pat,
2910 POINTERS_EXTEND_UNSIGNED);
2911 #endif
2912 emit_move_insn (src_reg, pat);
2914 pat = get_insns ();
2915 end_sequence ();
2917 if (before_strlen)
2918 emit_insn_after (pat, before_strlen);
2919 else
2920 emit_insn_before (pat, get_insns ());
2922 /* Return the value in the proper mode for this function. */
2923 if (GET_MODE (ops[0].value) == target_mode)
2924 target = ops[0].value;
2925 else if (target != 0)
2926 convert_move (target, ops[0].value, 0);
2927 else
2928 target = convert_to_mode (target_mode, ops[0].value, 0);
2930 return target;
2933 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
2934 bytes from constant string DATA + OFFSET and return it as target
2935 constant. */
2937 static rtx
2938 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2939 scalar_int_mode mode)
2941 const char *str = (const char *) data;
2943 gcc_assert (offset >= 0
2944 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2945 <= strlen (str) + 1));
2947 return c_readstr (str + offset, mode);
2950 /* LEN specifies the length of the block for a memcpy/memset operation.
2951 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2952 In some cases we can make a very likely guess at the maximum size,
2953 which we then store into PROBABLE_MAX_SIZE.  */
2955 static void
2956 determine_block_size (tree len, rtx len_rtx,
2957 unsigned HOST_WIDE_INT *min_size,
2958 unsigned HOST_WIDE_INT *max_size,
2959 unsigned HOST_WIDE_INT *probable_max_size)
2961 if (CONST_INT_P (len_rtx))
2963 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2964 return;
2966 else
2968 wide_int min, max;
2969 enum value_range_type range_type = VR_UNDEFINED;
2971 /* Determine bounds from the type. */
2972 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2973 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2974 else
2975 *min_size = 0;
2976 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2977 *probable_max_size = *max_size
2978 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2979 else
2980 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2982 if (TREE_CODE (len) == SSA_NAME)
2983 range_type = get_range_info (len, &min, &max);
2984 if (range_type == VR_RANGE)
2986 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2987 *min_size = min.to_uhwi ();
2988 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2989 *probable_max_size = *max_size = max.to_uhwi ();
2991 else if (range_type == VR_ANTI_RANGE)
2993 /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
2994 if (min == 0)
2996 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2997 *min_size = max.to_uhwi () + 1;
2999 /* Code like
3001 int n;
3002 if (n < 100)
3003 memcpy (a, b, n)
3005 produces an anti-range allowing negative values of N.  We can
3006 still use that information to guess that N is not negative.  */
3008 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3009 *probable_max_size = min.to_uhwi () - 1;
3012 gcc_checking_assert (*max_size <=
3013 (unsigned HOST_WIDE_INT)
3014 GET_MODE_MASK (GET_MODE (len_rtx)));
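/* Worked example (hypothetical ranges): in

     void f (unsigned n) { if (n >= 8 && n <= 32) memcpy (a, b, n); }

   range info gives N the range [8, 32], so *MIN_SIZE becomes 8 and both
   *MAX_SIZE and *PROBABLE_MAX_SIZE become 32, letting the expander pick
   a block-move strategy suited to small copies.  */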
3017 /* Try to verify that the sizes and lengths of the arguments to a string
3018 manipulation function given by EXP are within valid bounds and that
3019 the operation does not lead to buffer overflow or read past the end.
3020 Arguments other than EXP may be null. When non-null, the arguments
3021 have the following meaning:
3022 DST is the destination of a copy call or NULL otherwise.
3023 SRC is the source of a copy call or NULL otherwise.
3024 DSTWRITE is the number of bytes written into the destination obtained
3025 from the user-supplied size argument to the function (such as in
3026 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3027 MAXREAD is the user-supplied bound on the length of the source sequence
3028 (such as in strncat (d, s, N)).  It specifies the upper limit on the number
3029 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3030 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3031 expression EXP is a string function call (as opposed to a memory call
3032 like memcpy). As an exception, SRCSTR can also be an integer denoting
3033 the precomputed size of the source string or object (for functions like
3034 memcpy).
3035 DSTSIZE is the size of the destination object specified by the last
3036 argument to the _chk builtins, typically resulting from the expansion
3037 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3038 DSTSIZE)).
3040 When DSTWRITE is null, MAXREAD is checked to verify that it doesn't exceed
3041 SIZE_MAX.
3043 If the call is successfully verified as safe return true, otherwise
3044 return false. */
3046 static bool
3047 check_access (tree exp, tree, tree, tree dstwrite,
3048 tree maxread, tree srcstr, tree dstsize)
3050 int opt = OPT_Wstringop_overflow_;
3052 /* The size of the largest object is half the address space, or
3053 PTRDIFF_MAX. (This is way too permissive.) */
3054 tree maxobjsize = max_object_size ();
3056 /* Either the length of the source string for string functions or
3057 the size of the source object for raw memory functions. */
3058 tree slen = NULL_TREE;
3060 tree range[2] = { NULL_TREE, NULL_TREE };
3062 /* Set to true when the exact number of bytes written by a string
3063 function like strcpy is not known and the only thing that is
3064 known is that it must be at least one (for the terminating nul). */
3065 bool at_least_one = false;
3066 if (srcstr)
3068 /* SRCSTR is normally a pointer to string but as a special case
3069 it can be an integer denoting the length of a string. */
3070 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3072 /* Try to determine the range of lengths the source string
3073 refers to. If it can be determined and is less than
3074 the upper bound given by MAXREAD add one to it for
3075 the terminating nul. Otherwise, set it to one for
3076 the same reason, or to MAXREAD as appropriate. */
3077 get_range_strlen (srcstr, range);
3078 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3080 if (maxread && tree_int_cst_le (maxread, range[0]))
3081 range[0] = range[1] = maxread;
3082 else
3083 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3084 range[0], size_one_node);
3086 if (maxread && tree_int_cst_le (maxread, range[1]))
3087 range[1] = maxread;
3088 else if (!integer_all_onesp (range[1]))
3089 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3090 range[1], size_one_node);
3092 slen = range[0];
3094 else
3096 at_least_one = true;
3097 slen = size_one_node;
3100 else
3101 slen = srcstr;
3104 if (!dstwrite && !maxread)
3106 /* When the only available piece of data is the object size
3107 there is nothing to do. */
3108 if (!slen)
3109 return true;
3111 /* Otherwise, when the length of the source sequence is known
3112 (as with strlen), set DSTWRITE to it. */
3113 if (!range[0])
3114 dstwrite = slen;
3117 if (!dstsize)
3118 dstsize = maxobjsize;
3120 if (dstwrite)
3121 get_size_range (dstwrite, range);
3123 tree func = get_callee_fndecl (exp);
3125 /* First check the number of bytes to be written against the maximum
3126 object size. */
3127 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3129 location_t loc = tree_nonartificial_location (exp);
3130 loc = expansion_point_location_if_in_system_header (loc);
3132 if (range[0] == range[1])
3133 warning_at (loc, opt,
3134 "%K%qD specified size %E "
3135 "exceeds maximum object size %E",
3136 exp, func, range[0], maxobjsize);
3137 else
3138 warning_at (loc, opt,
3139 "%K%qD specified size between %E and %E "
3140 "exceeds maximum object size %E",
3141 exp, func,
3142 range[0], range[1], maxobjsize);
3143 return false;
3146 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3147 constant, and in range of unsigned HOST_WIDE_INT. */
3148 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3150 /* Next check the number of bytes to be written against the destination
3151 object size. */
3152 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3154 if (range[0]
3155 && ((tree_fits_uhwi_p (dstsize)
3156 && tree_int_cst_lt (dstsize, range[0]))
3157 || (tree_fits_uhwi_p (dstwrite)
3158 && tree_int_cst_lt (dstwrite, range[0]))))
3160 if (TREE_NO_WARNING (exp))
3161 return false;
3163 location_t loc = tree_nonartificial_location (exp);
3164 loc = expansion_point_location_if_in_system_header (loc);
3166 if (dstwrite == slen && at_least_one)
3168 /* This is a call to strcpy with a destination of 0 size
3169 and a source of unknown length. The call will write
3170 at least one byte past the end of the destination. */
3171 warning_at (loc, opt,
3172 "%K%qD writing %E or more bytes into a region "
3173 "of size %E overflows the destination",
3174 exp, func, range[0], dstsize);
3176 else if (tree_int_cst_equal (range[0], range[1]))
3177 warning_n (loc, opt, tree_to_uhwi (range[0]),
3178 "%K%qD writing %E byte into a region "
3179 "of size %E overflows the destination",
3180 "%K%qD writing %E bytes into a region "
3181 "of size %E overflows the destination",
3182 exp, func, range[0], dstsize);
3183 else if (tree_int_cst_sign_bit (range[1]))
3185 /* Avoid printing the upper bound if it's invalid. */
3186 warning_at (loc, opt,
3187 "%K%qD writing %E or more bytes into a region "
3188 "of size %E overflows the destination",
3189 exp, func, range[0], dstsize);
3191 else
3192 warning_at (loc, opt,
3193 "%K%qD writing between %E and %E bytes into "
3194 "a region of size %E overflows the destination",
3195 exp, func, range[0], range[1],
3196 dstsize);
3198 /* Return error when an overflow has been detected. */
3199 return false;
3203 /* Check the maximum length of the source sequence against the size
3204 of the destination object if known, or against the maximum size
3205 of an object. */
3206 if (maxread)
3208 get_size_range (maxread, range);
3210 /* Use the lower end for MAXREAD from now on. */
3211 if (range[0])
3212 maxread = range[0];
3214 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3216 location_t loc = tree_nonartificial_location (exp);
3217 loc = expansion_point_location_if_in_system_header (loc);
3219 if (tree_int_cst_lt (maxobjsize, range[0]))
3221 if (TREE_NO_WARNING (exp))
3222 return false;
3224 /* Warn about crazy big sizes first since that's more
3225 likely to be meaningful than saying that the bound
3226 is greater than the object size if both are big. */
3227 if (range[0] == range[1])
3228 warning_at (loc, opt,
3229 "%K%qD specified bound %E "
3230 "exceeds maximum object size %E",
3231 exp, func,
3232 range[0], maxobjsize);
3233 else
3234 warning_at (loc, opt,
3235 "%K%qD specified bound between %E and %E "
3236 "exceeds maximum object size %E",
3237 exp, func,
3238 range[0], range[1], maxobjsize);
3240 return false;
3243 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3245 if (TREE_NO_WARNING (exp))
3246 return false;
3248 if (tree_int_cst_equal (range[0], range[1]))
3249 warning_at (loc, opt,
3250 "%K%qD specified bound %E "
3251 "exceeds destination size %E",
3252 exp, func,
3253 range[0], dstsize);
3254 else
3255 warning_at (loc, opt,
3256 "%K%qD specified bound between %E and %E "
3257 "exceeds destination size %E",
3258 exp, func,
3259 range[0], range[1], dstsize);
3260 return false;
3265 /* Check for reading past the end of SRC. */
3266 if (slen
3267 && slen == srcstr
3268 && dstwrite && range[0]
3269 && tree_int_cst_lt (slen, range[0]))
3271 if (TREE_NO_WARNING (exp))
3272 return false;
3274 location_t loc = tree_nonartificial_location (exp);
3276 if (tree_int_cst_equal (range[0], range[1]))
3277 warning_n (loc, opt, tree_to_uhwi (range[0]),
3278 "%K%qD reading %E byte from a region of size %E",
3279 "%K%qD reading %E bytes from a region of size %E",
3280 exp, func, range[0], slen);
3281 else if (tree_int_cst_sign_bit (range[1]))
3283 /* Avoid printing the upper bound if it's invalid. */
3284 warning_at (loc, opt,
3285 "%K%qD reading %E or more bytes from a region "
3286 "of size %E",
3287 exp, func, range[0], slen);
3289 else
3290 warning_at (loc, opt,
3291 "%K%qD reading between %E and %E bytes from a region "
3292 "of size %E",
3293 exp, func, range[0], range[1], slen);
3294 return false;
3297 return true;
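/* Example diagnostic (illustrative): for

     char d[3];
     memcpy (d, s, 5);

   DSTWRITE is 5 and DSTSIZE is 3, so the check fails and emits roughly:
   warning: 'memcpy' writing 5 bytes into a region of size 3 overflows
   the destination [-Wstringop-overflow=].  */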
3300 /* Helper to compute the size of the object referenced by the DEST
3301 expression which must have pointer type, using Object Size type
3302 OSTYPE (only the least significant 2 bits are used). Return
3303 an estimate of the size of the object if successful or NULL when
3304 the size cannot be determined. When the referenced object involves
3305 a non-constant offset in some range the returned value represents
3306 the largest size given the smallest non-negative offset in the
3307 range. The function is intended for diagnostics and should not
3308 be used to influence code generation or optimization. */
3310 tree
3311 compute_objsize (tree dest, int ostype)
3313 unsigned HOST_WIDE_INT size;
3315 /* Only the two least significant bits are meaningful. */
3316 ostype &= 3;
3318 if (compute_builtin_object_size (dest, ostype, &size))
3319 return build_int_cst (sizetype, size);
3321 if (TREE_CODE (dest) == SSA_NAME)
3323 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3324 if (!is_gimple_assign (stmt))
3325 return NULL_TREE;
3327 dest = gimple_assign_rhs1 (stmt);
3329 tree_code code = gimple_assign_rhs_code (stmt);
3330 if (code == POINTER_PLUS_EXPR)
3332 /* compute_builtin_object_size fails for addresses with
3333 non-constant offsets. Try to determine the range of
3334 such an offset here and use it to adjust the constant
3335 size. */
3336 tree off = gimple_assign_rhs2 (stmt);
3337 if (TREE_CODE (off) == SSA_NAME
3338 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3340 wide_int min, max;
3341 enum value_range_type rng = get_range_info (off, &min, &max);
3343 if (rng == VR_RANGE)
3345 if (tree size = compute_objsize (dest, ostype))
3347 wide_int wisiz = wi::to_wide (size);
3349 /* Ignore negative offsets for now. For others,
3350 use the lower bound as the most optimistic
3351 estimate of the (remaining) size.  */
3352 if (wi::sign_mask (min))
3354 else if (wi::ltu_p (min, wisiz))
3355 return wide_int_to_tree (TREE_TYPE (size),
3356 wi::sub (wisiz, min));
3357 else
3358 return size_zero_node;
3363 else if (code != ADDR_EXPR)
3364 return NULL_TREE;
3367 /* Unless computing the largest size (for memcpy and other raw memory
3368 functions), try to determine the size of the object from its type. */
3369 if (!ostype)
3370 return NULL_TREE;
3372 if (TREE_CODE (dest) != ADDR_EXPR)
3373 return NULL_TREE;
3375 tree type = TREE_TYPE (dest);
3376 if (TREE_CODE (type) == POINTER_TYPE)
3377 type = TREE_TYPE (type);
3379 type = TYPE_MAIN_VARIANT (type);
3381 if (TREE_CODE (type) == ARRAY_TYPE
3382 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3384 /* Return the constant size unless it's zero (that's a zero-length
3385 array likely at the end of a struct). */
3386 tree size = TYPE_SIZE_UNIT (type);
3387 if (size && TREE_CODE (size) == INTEGER_CST
3388 && !integer_zerop (size))
3389 return size;
3392 return NULL_TREE;
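/* Example: given

     char buf[8];
     char *p = buf + i;    (with I known to be in the range [2, 6])

   compute_objsize (p, 1) uses the smallest offset 2 and returns 6, the
   most optimistic estimate of the space remaining at P.  */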
3395 /* Helper to determine and check the sizes of the source and the destination
3396 of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
3397 call expression, DEST is the destination argument, SRC is the source
3398 argument or null, and SIZE is the number of bytes.  Use Object Size type-0
3399 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3400 (no overflow or invalid sizes), false otherwise. */
3402 static bool
3403 check_memop_access (tree exp, tree dest, tree src, tree size)
3405 /* For functions like memset and memcpy that operate on raw memory
3406 try to determine the size of the largest source and destination
3407 object using type-0 Object Size regardless of the object size
3408 type specified by the option. */
3409 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3410 tree dstsize = compute_objsize (dest, 0);
3412 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3413 srcsize, dstsize);
3416 /* Validate memchr arguments without performing any expansion.
3417 Return NULL_RTX. */
3419 static rtx
3420 expand_builtin_memchr (tree exp, rtx)
3422 if (!validate_arglist (exp,
3423 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3424 return NULL_RTX;
3426 tree arg1 = CALL_EXPR_ARG (exp, 0);
3427 tree len = CALL_EXPR_ARG (exp, 2);
3429 /* Diagnose calls where the specified length exceeds the size
3430 of the object. */
3431 if (warn_stringop_overflow)
3433 tree size = compute_objsize (arg1, 0);
3434 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3435 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3438 return NULL_RTX;
3441 /* Expand a call EXP to the memcpy builtin.
3442 Return NULL_RTX if we failed; the caller should emit a normal call,
3443 otherwise try to get the result in TARGET, if convenient (and in
3444 mode MODE if that's convenient). */
3446 static rtx
3447 expand_builtin_memcpy (tree exp, rtx target)
3449 if (!validate_arglist (exp,
3450 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3451 return NULL_RTX;
3453 tree dest = CALL_EXPR_ARG (exp, 0);
3454 tree src = CALL_EXPR_ARG (exp, 1);
3455 tree len = CALL_EXPR_ARG (exp, 2);
3457 check_memop_access (exp, dest, src, len);
3459 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3460 /*endp=*/ 0);
3463 /* Check a call EXP to the memmove built-in for validity.
3464 Return NULL_RTX on both success and failure. */
3466 static rtx
3467 expand_builtin_memmove (tree exp, rtx)
3469 if (!validate_arglist (exp,
3470 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3471 return NULL_RTX;
3473 tree dest = CALL_EXPR_ARG (exp, 0);
3474 tree src = CALL_EXPR_ARG (exp, 1);
3475 tree len = CALL_EXPR_ARG (exp, 2);
3477 check_memop_access (exp, dest, src, len);
3479 return NULL_RTX;
3482 /* Expand an instrumented call EXP to the memcpy builtin.
3483 Return NULL_RTX if we failed; the caller should emit a normal call,
3484 otherwise try to get the result in TARGET, if convenient (and in
3485 mode MODE if that's convenient). */
3487 static rtx
3488 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3490 if (!validate_arglist (exp,
3491 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3492 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3493 INTEGER_TYPE, VOID_TYPE))
3494 return NULL_RTX;
3495 else
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 2);
3499 tree len = CALL_EXPR_ARG (exp, 4);
3500 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3501 /*end_p=*/ 0);
3503 /* Return src bounds with the result. */
3504 if (res)
3506 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3507 expand_normal (CALL_EXPR_ARG (exp, 1)));
3508 res = chkp_join_splitted_slot (res, bnd);
3510 return res;
3514 /* Expand a call EXP to the mempcpy builtin.
3515 Return NULL_RTX if we failed; the caller should emit a normal call,
3516 otherwise try to get the result in TARGET, if convenient (and in
3517 mode MODE if that's convenient). If ENDP is 0 return the
3518 destination pointer, if ENDP is 1 return the end pointer ala
3519 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3520 stpcpy. */
3522 static rtx
3523 expand_builtin_mempcpy (tree exp, rtx target)
3525 if (!validate_arglist (exp,
3526 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3527 return NULL_RTX;
3529 tree dest = CALL_EXPR_ARG (exp, 0);
3530 tree src = CALL_EXPR_ARG (exp, 1);
3531 tree len = CALL_EXPR_ARG (exp, 2);
3533 /* Policy does not generally allow using compute_objsize (which
3534 is used internally by check_memop_access) to change code generation
3535 or drive optimization decisions.
3537 In this instance it is safe because the code we generate has
3538 the same semantics regardless of the return value of
3539 check_memop_access. Exactly the same amount of data is copied
3540 and the return value is exactly the same in both cases.
3542 Furthermore, check_memop_access always uses mode 0 for the call to
3543 compute_objsize, so the imprecise nature of compute_objsize is
3544 avoided. */
3546 /* Avoid expanding mempcpy into memcpy when the call is determined
3547 to overflow the buffer. This also prevents the same overflow
3548 from being diagnosed again when expanding memcpy. */
3549 if (!check_memop_access (exp, dest, src, len))
3550 return NULL_RTX;
3552 return expand_builtin_mempcpy_args (dest, src, len,
3553 target, exp, /*endp=*/ 1);
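/* Illustrative summary of the ENDP return conventions in plain C
   (an assumed example, not part of this file):

     char d[8];
     memcpy (d, "abc", 4);             // ENDP == 0: result is d
     char *e = mempcpy (d, "abc", 4);  // ENDP == 1: result is d + 4
     char *p = stpcpy (d, "abc");      // ENDP == 2: result is d + 3,
                                       // the address of the NUL.  */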
3556 /* Expand an instrumented call EXP to the mempcpy builtin.
3557 Return NULL_RTX if we failed; the caller should emit a normal call,
3558 otherwise try to get the result in TARGET, if convenient (and in
3559 mode MODE if that's convenient). */
3561 static rtx
3562 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3564 if (!validate_arglist (exp,
3565 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3566 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3567 INTEGER_TYPE, VOID_TYPE))
3568 return NULL_RTX;
3569 else
3571 tree dest = CALL_EXPR_ARG (exp, 0);
3572 tree src = CALL_EXPR_ARG (exp, 2);
3573 tree len = CALL_EXPR_ARG (exp, 4);
3574 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3575 exp, 1);
3577 /* Return src bounds with the result. */
3578 if (res)
3580 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3581 expand_normal (CALL_EXPR_ARG (exp, 1)));
3582 res = chkp_join_splitted_slot (res, bnd);
3584 return res;
3588 /* Helper function to do the actual work for expand of memory copy family
3589 functions (memcpy, mempcpy, stpcpy). Expansion should copy LEN bytes
3590 of memory from SRC to DEST and assign the result to TARGET if convenient.
3591 If ENDP is 0 return the
3592 destination pointer, if ENDP is 1 return the end pointer ala
3593 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3594 stpcpy. */
3596 static rtx
3597 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3598 rtx target, tree exp, int endp)
3600 const char *src_str;
3601 unsigned int src_align = get_pointer_alignment (src);
3602 unsigned int dest_align = get_pointer_alignment (dest);
3603 rtx dest_mem, src_mem, dest_addr, len_rtx;
3604 HOST_WIDE_INT expected_size = -1;
3605 unsigned int expected_align = 0;
3606 unsigned HOST_WIDE_INT min_size;
3607 unsigned HOST_WIDE_INT max_size;
3608 unsigned HOST_WIDE_INT probable_max_size;
3610 /* If DEST is not a pointer type, call the normal function. */
3611 if (dest_align == 0)
3612 return NULL_RTX;
3614 /* If SRC is not a pointer type, don't do this
3615 operation in-line. */
3616 if (src_align == 0)
3617 return NULL_RTX;
3619 if (currently_expanding_gimple_stmt)
3620 stringop_block_profile (currently_expanding_gimple_stmt,
3621 &expected_align, &expected_size);
3623 if (expected_align < dest_align)
3624 expected_align = dest_align;
3625 dest_mem = get_memory_rtx (dest, len);
3626 set_mem_align (dest_mem, dest_align);
3627 len_rtx = expand_normal (len);
3628 determine_block_size (len, len_rtx, &min_size, &max_size,
3629 &probable_max_size);
3630 src_str = c_getstr (src);
3632 /* If SRC is a string constant and block move would be done
3633 by pieces, we can avoid loading the string from memory
3634 and only store the computed constants. */
3635 if (src_str
3636 && CONST_INT_P (len_rtx)
3637 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3638 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3639 CONST_CAST (char *, src_str),
3640 dest_align, false))
3642 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3643 builtin_memcpy_read_str,
3644 CONST_CAST (char *, src_str),
3645 dest_align, false, endp);
3646 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3647 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3648 return dest_mem;
3651 src_mem = get_memory_rtx (src, len);
3652 set_mem_align (src_mem, src_align);
3654 /* Copy word part most expediently. */
3655 enum block_op_methods method = BLOCK_OP_NORMAL;
3656 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3657 method = BLOCK_OP_TAILCALL;
3658 if (endp == 1 && target != const0_rtx)
3659 method = BLOCK_OP_NO_LIBCALL_RET;
3660 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3661 expected_align, expected_size,
3662 min_size, max_size, probable_max_size);
3663 if (dest_addr == pc_rtx)
3664 return NULL_RTX;
3666 if (dest_addr == 0)
3668 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3669 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3672 if (endp && target != const0_rtx)
3674 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3675 /* stpcpy returns a pointer to the last byte, the NUL. */
3676 if (endp == 2)
3677 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3680 return dest_addr;
3683 static rtx
3684 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3685 rtx target, tree orig_exp, int endp)
3687 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3688 endp);
3691 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3692 we failed, the caller should emit a normal call, otherwise try to
3693 get the result in TARGET, if convenient. If ENDP is 0 return the
3694 destination pointer, if ENDP is 1 return the end pointer ala
3695 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3696 stpcpy. */
3698 static rtx
3699 expand_movstr (tree dest, tree src, rtx target, int endp)
3701 struct expand_operand ops[3];
3702 rtx dest_mem;
3703 rtx src_mem;
3705 if (!targetm.have_movstr ())
3706 return NULL_RTX;
3708 dest_mem = get_memory_rtx (dest, NULL);
3709 src_mem = get_memory_rtx (src, NULL);
3710 if (!endp)
3712 target = force_reg (Pmode, XEXP (dest_mem, 0));
3713 dest_mem = replace_equiv_address (dest_mem, target);
3716 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3717 create_fixed_operand (&ops[1], dest_mem);
3718 create_fixed_operand (&ops[2], src_mem);
3719 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3720 return NULL_RTX;
3722 if (endp && target != const0_rtx)
3724 target = ops[0].value;
3725 /* movstr is supposed to set end to the address of the NUL
3726 terminator. If the caller requested a mempcpy-like return value,
3727 adjust it. */
3728 if (endp == 1)
3730 rtx tem = plus_constant (GET_MODE (target),
3731 gen_lowpart (GET_MODE (target), target), 1);
3732 emit_move_insn (target, force_operand (tem, NULL_RTX));
3735 return target;
3738 /* Do some very basic size validation of a call to the strcat builtin
3739 given by EXP. Return NULL_RTX to have the built-in expand to a call
3740 to the library function. */
3742 static rtx
3743 expand_builtin_strcat (tree exp, rtx)
3745 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3746 || !warn_stringop_overflow)
3747 return NULL_RTX;
3749 tree dest = CALL_EXPR_ARG (exp, 0);
3750 tree src = CALL_EXPR_ARG (exp, 1);
3752 /* There is no way here to determine the length of the string in
3753 the destination to which the SRC string is being appended, so
3754 just diagnose cases when the source string is longer than
3755 the destination object. */
3757 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3759 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3760 destsize);
3762 return NULL_RTX;
3765 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3766 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3767 try to get the result in TARGET, if convenient (and in mode MODE if that's
3768 convenient). */
3770 static rtx
3771 expand_builtin_strcpy (tree exp, rtx target)
3773 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3774 return NULL_RTX;
3776 tree dest = CALL_EXPR_ARG (exp, 0);
3777 tree src = CALL_EXPR_ARG (exp, 1);
3779 if (warn_stringop_overflow)
3781 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3782 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3783 src, destsize);
3786 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3788 /* Check to see if the argument was declared attribute nonstring
3789 and if so, issue a warning since at this point it's not known
3790 to be nul-terminated. */
3791 tree fndecl = get_callee_fndecl (exp);
3792 maybe_warn_nonstring_arg (fndecl, exp);
3793 return ret;
3796 return NULL_RTX;
3799 /* Helper function to do the actual work for expand_builtin_strcpy. The
3800 arguments to the builtin_strcpy call DEST and SRC are broken out
3801 so that this can also be called without constructing an actual CALL_EXPR.
3802 The other arguments and return value are the same as for
3803 expand_builtin_strcpy. */
3805 static rtx
3806 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3808 return expand_movstr (dest, src, target, /*endp=*/0);
3811 /* Expand a call EXP to the stpcpy builtin.
3812 Return NULL_RTX if we failed; the caller should emit a normal call;
3813 otherwise try to get the result in TARGET, if convenient (and in
3814 mode MODE if that's convenient). */
3816 static rtx
3817 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3819 tree dst, src;
3820 location_t loc = EXPR_LOCATION (exp);
3822 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3823 return NULL_RTX;
3825 dst = CALL_EXPR_ARG (exp, 0);
3826 src = CALL_EXPR_ARG (exp, 1);
3828 if (warn_stringop_overflow)
3830 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3831 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3832 src, destsize);
3835 /* If return value is ignored, transform stpcpy into strcpy. */
3836 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3838 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3839 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3840 return expand_expr (result, target, mode, EXPAND_NORMAL);
3842 else
3844 tree len, lenp1;
3845 rtx ret;
3847 /* Ensure we get an actual string whose length can be evaluated at
3848 compile-time, not an expression containing a string. This is
3849 because the latter will potentially produce pessimized code
3850 when used to produce the return value. */
3851 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3852 return expand_movstr (dst, src, target, /*endp=*/2);
3854 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3855 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3856 target, exp, /*endp=*/2);
3858 if (ret)
3859 return ret;
3861 if (TREE_CODE (len) == INTEGER_CST)
3863 rtx len_rtx = expand_normal (len);
3865 if (CONST_INT_P (len_rtx))
3867 ret = expand_builtin_strcpy_args (dst, src, target);
3869 if (ret)
3871 if (! target)
3873 if (mode != VOIDmode)
3874 target = gen_reg_rtx (mode);
3875 else
3876 target = gen_reg_rtx (GET_MODE (ret));
3878 if (GET_MODE (target) != GET_MODE (ret))
3879 ret = gen_lowpart (GET_MODE (target), ret);
3881 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3882 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3883 gcc_assert (ret);
3885 return target;
3890 return expand_movstr (dst, src, target, /*endp=*/2);
3894 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3895 arguments while being careful to avoid duplicate warnings (which could
3896 be issued if the expander were to expand the call, resulting in it
3897 being emitted in expand_call()). */
3899 static rtx
3900 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3902 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3904 /* The call has been successfully expanded. Check for nonstring
3905 arguments and issue warnings as appropriate. */
3906 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3907 return ret;
3910 return NULL_RTX;
3913 /* Check a call EXP to the stpncpy built-in for validity.
3914 Return NULL_RTX on both success and failure. */
3916 static rtx
3917 expand_builtin_stpncpy (tree exp, rtx)
3919 if (!validate_arglist (exp,
3920 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3921 || !warn_stringop_overflow)
3922 return NULL_RTX;
3924 /* The source and destination of the call. */
3925 tree dest = CALL_EXPR_ARG (exp, 0);
3926 tree src = CALL_EXPR_ARG (exp, 1);
3928 /* The exact number of bytes to write (not the maximum). */
3929 tree len = CALL_EXPR_ARG (exp, 2);
3931 /* The size of the destination object. */
3932 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3934 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3936 return NULL_RTX;
3939 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3940 bytes from constant string DATA + OFFSET and return it as target
3941 constant. */
3944 static rtx builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3945 scalar_int_mode mode)
3947 const char *str = (const char *) data;
3949 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3950 return const0_rtx;
3952 return c_readstr (str + offset, mode);
3955 /* Helper to check the sizes of sequences and the destination of calls
3956 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3957 success (no overflow or invalid sizes), false otherwise. */
3959 static bool
3960 check_strncat_sizes (tree exp, tree objsize)
3962 tree dest = CALL_EXPR_ARG (exp, 0);
3963 tree src = CALL_EXPR_ARG (exp, 1);
3964 tree maxread = CALL_EXPR_ARG (exp, 2);
3966 /* Try to determine the range of lengths that the source expression
3967 refers to. */
3968 tree lenrange[2];
3969 get_range_strlen (src, lenrange);
3971 /* Try to verify that the destination is big enough for the shortest
3972 string. */
3974 if (!objsize && warn_stringop_overflow)
3976 /* If it hasn't been provided by __strncat_chk, try to determine
3977 the size of the destination object into which the source is
3978 being copied. */
3979 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3982 /* Add one for the terminating nul. */
3983 tree srclen = (lenrange[0]
3984 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3985 size_one_node)
3986 : NULL_TREE);
3988 /* The strncat function copies at most MAXREAD bytes and always appends
3989 the terminating nul so the specified upper bound should never be equal
3990 to (or greater than) the size of the destination. */
3991 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3992 && tree_int_cst_equal (objsize, maxread))
3994 location_t loc = tree_nonartificial_location (exp);
3995 loc = expansion_point_location_if_in_system_header (loc);
3997 warning_at (loc, OPT_Wstringop_overflow_,
3998 "%K%qD specified bound %E equals destination size",
3999 exp, get_callee_fndecl (exp), maxread);
4001 return false;
4004 if (!srclen
4005 || (maxread && tree_fits_uhwi_p (maxread)
4006 && tree_fits_uhwi_p (srclen)
4007 && tree_int_cst_lt (maxread, srclen)))
4008 srclen = maxread;
4010 /* The number of bytes to write is LEN but check_access will also
4011 check SRCLEN if LEN's value isn't known. */
4012 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4013 objsize);
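/* Illustrative example (assumed, not part of this file) of the bound
   check above:

     char d[8] = "";
     const char *s = "xyz";
     strncat (d, s, sizeof d);   // bound equals destination size: warned,
                                 // since strncat appends a NUL after the
                                 // MAXREAD bytes it may copy

   A safe bound here is sizeof d - strlen (d) - 1.  */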
4016 /* Similar to expand_builtin_strcat, do some very basic size validation
4017 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4018 the built-in expand to a call to the library function. */
4020 static rtx
4021 expand_builtin_strncat (tree exp, rtx)
4023 if (!validate_arglist (exp,
4024 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4025 || !warn_stringop_overflow)
4026 return NULL_RTX;
4028 tree dest = CALL_EXPR_ARG (exp, 0);
4029 tree src = CALL_EXPR_ARG (exp, 1);
4030 /* The upper bound on the number of bytes to write. */
4031 tree maxread = CALL_EXPR_ARG (exp, 2);
4032 /* The length of the source sequence. */
4033 tree slen = c_strlen (src, 1);
4035 /* Try to determine the range of lengths that the source expression
4036 refers to. */
4037 tree lenrange[2];
4038 if (slen)
4039 lenrange[0] = lenrange[1] = slen;
4040 else
4041 get_range_strlen (src, lenrange);
4043 /* Try to verify that the destination is big enough for the shortest
4044 string. First try to determine the size of the destination object
4045 into which the source is being copied. */
4046 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4048 /* Add one for the terminating nul. */
4049 tree srclen = (lenrange[0]
4050 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4051 size_one_node)
4052 : NULL_TREE);
4054 /* The strncat function copies at most MAXREAD bytes and always appends
4055 the terminating nul so the specified upper bound should never be equal
4056 to (or greater than) the size of the destination. */
4057 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4058 && tree_int_cst_equal (destsize, maxread))
4060 location_t loc = tree_nonartificial_location (exp);
4061 loc = expansion_point_location_if_in_system_header (loc);
4063 warning_at (loc, OPT_Wstringop_overflow_,
4064 "%K%qD specified bound %E equals destination size",
4065 exp, get_callee_fndecl (exp), maxread);
4067 return NULL_RTX;
4070 if (!srclen
4071 || (maxread && tree_fits_uhwi_p (maxread)
4072 && tree_fits_uhwi_p (srclen)
4073 && tree_int_cst_lt (maxread, srclen)))
4074 srclen = maxread;
4076 /* The number of bytes to write is SRCLEN. */
4077 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4079 return NULL_RTX;
4082 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4083 NULL_RTX if we failed; the caller should emit a normal call. */
4085 static rtx
4086 expand_builtin_strncpy (tree exp, rtx target)
4088 location_t loc = EXPR_LOCATION (exp);
4090 if (validate_arglist (exp,
4091 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4093 tree dest = CALL_EXPR_ARG (exp, 0);
4094 tree src = CALL_EXPR_ARG (exp, 1);
4095 /* The number of bytes to write (not the maximum). */
4096 tree len = CALL_EXPR_ARG (exp, 2);
4097 /* The length of the source sequence. */
4098 tree slen = c_strlen (src, 1);
4100 if (warn_stringop_overflow)
4102 tree destsize = compute_objsize (dest,
4103 warn_stringop_overflow - 1);
4105 /* The number of bytes to write is LEN but check_access will also
4106 check SLEN if LEN's value isn't known. */
4107 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4108 destsize);
4111 /* We must have a constant LEN and a SRC whose length is a known constant. */
4112 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4113 return NULL_RTX;
4115 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4117 /* We're required to pad with trailing zeros if the requested
4118 len is greater than strlen(s2)+1. In that case try to
4119 use store_by_pieces; if it fails, punt.
4120 if (tree_int_cst_lt (slen, len))
4122 unsigned int dest_align = get_pointer_alignment (dest);
4123 const char *p = c_getstr (src);
4124 rtx dest_mem;
4126 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4127 || !can_store_by_pieces (tree_to_uhwi (len),
4128 builtin_strncpy_read_str,
4129 CONST_CAST (char *, p),
4130 dest_align, false))
4131 return NULL_RTX;
4133 dest_mem = get_memory_rtx (dest, len);
4134 store_by_pieces (dest_mem, tree_to_uhwi (len),
4135 builtin_strncpy_read_str,
4136 CONST_CAST (char *, p), dest_align, false, 0);
4137 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4138 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4139 return dest_mem;
4142 return NULL_RTX;
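/* Illustrative note (an assumed example, not part of this file) on the
   padding rule handled above:

     char d[8];
     strncpy (d, "ab", sizeof d);  // writes 'a', 'b' and six '\0's;
                                   // LEN (8) > strlen ("ab") + 1 (3),
                                   // so store_by_pieces emits all 8
                                   // bytes.  */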
4145 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4146 bytes from constant string DATA + OFFSET and return it as target
4147 constant. */
4150 static rtx builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4151 scalar_int_mode mode)
4153 const char *c = (const char *) data;
4154 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4156 memset (p, *c, GET_MODE_SIZE (mode));
4158 return c_readstr (p, mode);
4161 /* Callback routine for store_by_pieces. Return the RTL of a register
4162 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4163 char value given in the RTL register data. For example, if mode is
4164 4 bytes wide, return the RTL for 0x01010101*data. */
4166 static rtx
4167 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4168 scalar_int_mode mode)
4170 rtx target, coeff;
4171 size_t size;
4172 char *p;
4174 size = GET_MODE_SIZE (mode);
4175 if (size == 1)
4176 return (rtx) data;
4178 p = XALLOCAVEC (char, size);
4179 memset (p, 1, size);
4180 coeff = c_readstr (p, mode);
4182 target = convert_to_mode (mode, (rtx) data, 1);
4183 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4184 return force_reg (mode, target);
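/* A minimal sketch of the same byte-replication trick in plain C
   (illustrative, not part of this file):

     static unsigned int
     replicate_byte_4 (unsigned char c)
     {
       return 0x01010101u * c;   // e.g. c == 0xab yields 0xabababab
     }
*/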
4187 /* Expand expression EXP, which is a call to the memset builtin. Return
4188 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4189 try to get the result in TARGET, if convenient (and in mode MODE if that's
4190 convenient). */
4192 static rtx
4193 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4195 if (!validate_arglist (exp,
4196 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4197 return NULL_RTX;
4199 tree dest = CALL_EXPR_ARG (exp, 0);
4200 tree val = CALL_EXPR_ARG (exp, 1);
4201 tree len = CALL_EXPR_ARG (exp, 2);
4203 check_memop_access (exp, dest, NULL_TREE, len);
4205 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4208 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4209 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
4210 try to get the result in TARGET, if convenient (and in mode MODE if that's
4211 convenient). */
4213 static rtx
4214 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4216 if (!validate_arglist (exp,
4217 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4218 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4219 return NULL_RTX;
4220 else
4222 tree dest = CALL_EXPR_ARG (exp, 0);
4223 tree val = CALL_EXPR_ARG (exp, 2);
4224 tree len = CALL_EXPR_ARG (exp, 3);
4225 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4227 /* Return src bounds with the result. */
4228 if (res)
4230 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4231 expand_normal (CALL_EXPR_ARG (exp, 1)));
4232 res = chkp_join_splitted_slot (res, bnd);
4234 return res;
4238 /* Helper function to do the actual work for expand_builtin_memset. The
4239 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4240 so that this can also be called without constructing an actual CALL_EXPR.
4241 The other arguments and return value are the same as for
4242 expand_builtin_memset. */
4244 static rtx
4245 expand_builtin_memset_args (tree dest, tree val, tree len,
4246 rtx target, machine_mode mode, tree orig_exp)
4248 tree fndecl, fn;
4249 enum built_in_function fcode;
4250 machine_mode val_mode;
4251 char c;
4252 unsigned int dest_align;
4253 rtx dest_mem, dest_addr, len_rtx;
4254 HOST_WIDE_INT expected_size = -1;
4255 unsigned int expected_align = 0;
4256 unsigned HOST_WIDE_INT min_size;
4257 unsigned HOST_WIDE_INT max_size;
4258 unsigned HOST_WIDE_INT probable_max_size;
4260 dest_align = get_pointer_alignment (dest);
4262 /* If DEST is not a pointer type, don't do this operation in-line. */
4263 if (dest_align == 0)
4264 return NULL_RTX;
4266 if (currently_expanding_gimple_stmt)
4267 stringop_block_profile (currently_expanding_gimple_stmt,
4268 &expected_align, &expected_size);
4270 if (expected_align < dest_align)
4271 expected_align = dest_align;
4273 /* If the LEN parameter is zero, return DEST. */
4274 if (integer_zerop (len))
4276 /* Evaluate and ignore VAL in case it has side-effects. */
4277 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4278 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4281 /* Stabilize the arguments in case we fail. */
4282 dest = builtin_save_expr (dest);
4283 val = builtin_save_expr (val);
4284 len = builtin_save_expr (len);
4286 len_rtx = expand_normal (len);
4287 determine_block_size (len, len_rtx, &min_size, &max_size,
4288 &probable_max_size);
4289 dest_mem = get_memory_rtx (dest, len);
4290 val_mode = TYPE_MODE (unsigned_char_type_node);
4292 if (TREE_CODE (val) != INTEGER_CST)
4294 rtx val_rtx;
4296 val_rtx = expand_normal (val);
4297 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4299 /* Assume that we can memset by pieces if we can store
4300 the coefficients by pieces (in the required modes).
4301 We can't pass builtin_memset_gen_str as that emits RTL. */
4302 c = 1;
4303 if (tree_fits_uhwi_p (len)
4304 && can_store_by_pieces (tree_to_uhwi (len),
4305 builtin_memset_read_str, &c, dest_align,
4306 true))
4308 val_rtx = force_reg (val_mode, val_rtx);
4309 store_by_pieces (dest_mem, tree_to_uhwi (len),
4310 builtin_memset_gen_str, val_rtx, dest_align,
4311 true, 0);
4313 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4314 dest_align, expected_align,
4315 expected_size, min_size, max_size,
4316 probable_max_size))
4317 goto do_libcall;
4319 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4320 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4321 return dest_mem;
4324 if (target_char_cast (val, &c))
4325 goto do_libcall;
4327 if (c)
4329 if (tree_fits_uhwi_p (len)
4330 && can_store_by_pieces (tree_to_uhwi (len),
4331 builtin_memset_read_str, &c, dest_align,
4332 true))
4333 store_by_pieces (dest_mem, tree_to_uhwi (len),
4334 builtin_memset_read_str, &c, dest_align, true, 0);
4335 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4336 gen_int_mode (c, val_mode),
4337 dest_align, expected_align,
4338 expected_size, min_size, max_size,
4339 probable_max_size))
4340 goto do_libcall;
4342 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4343 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4344 return dest_mem;
4347 set_mem_align (dest_mem, dest_align);
4348 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4349 CALL_EXPR_TAILCALL (orig_exp)
4350 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4351 expected_align, expected_size,
4352 min_size, max_size,
4353 probable_max_size);
4355 if (dest_addr == 0)
4357 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4358 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4361 return dest_addr;
4363 do_libcall:
4364 fndecl = get_callee_fndecl (orig_exp);
4365 fcode = DECL_FUNCTION_CODE (fndecl);
4366 if (fcode == BUILT_IN_MEMSET
4367 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4368 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4369 dest, val, len);
4370 else if (fcode == BUILT_IN_BZERO)
4371 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4372 dest, len);
4373 else
4374 gcc_unreachable ();
4375 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4376 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4377 return expand_call (fn, target, target == const0_rtx);
4380 /* Expand expression EXP, which is a call to the bzero builtin. Return
4381 NULL_RTX if we failed; the caller should emit a normal call. */
4383 static rtx
4384 expand_builtin_bzero (tree exp)
4386 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4387 return NULL_RTX;
4389 tree dest = CALL_EXPR_ARG (exp, 0);
4390 tree size = CALL_EXPR_ARG (exp, 1);
4392 check_memop_access (exp, dest, NULL_TREE, size);
4394 /* New argument list transforming bzero(ptr x, int y) to
4395 memset(ptr x, int 0, size_t y). This is done this way
4396 so that if it isn't expanded inline, we fall back to
4397 calling bzero instead of memset. */
4399 location_t loc = EXPR_LOCATION (exp);
4401 return expand_builtin_memset_args (dest, integer_zero_node,
4402 fold_convert_loc (loc,
4403 size_type_node, size),
4404 const0_rtx, VOIDmode, exp);
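/* Illustrative equivalence (an assumed example, not part of this file):

     bzero (p, n);                // is expanded as if it were
     memset (p, 0, (size_t) n);   // but with the original bzero fndecl
                                  // kept, so a failed inline expansion
                                  // falls back to a bzero library call.  */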
4407 /* Try to expand cmpstr operation ICODE with the given operands.
4408 Return the result rtx on success, otherwise return null. */
4410 static rtx
4411 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4412 HOST_WIDE_INT align)
4414 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4416 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4417 target = NULL_RTX;
4419 struct expand_operand ops[4];
4420 create_output_operand (&ops[0], target, insn_mode);
4421 create_fixed_operand (&ops[1], arg1_rtx);
4422 create_fixed_operand (&ops[2], arg2_rtx);
4423 create_integer_operand (&ops[3], align);
4424 if (maybe_expand_insn (icode, 4, ops))
4425 return ops[0].value;
4426 return NULL_RTX;
4429 /* Expand expression EXP, which is a call to the memcmp built-in function.
4430 Return NULL_RTX if we failed and the caller should emit a normal call,
4431 otherwise try to get the result in TARGET, if convenient.
4432 RESULT_EQ is true if we can relax the returned value to be either zero
4433 or nonzero, without caring about the sign. */
4435 static rtx
4436 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4438 if (!validate_arglist (exp,
4439 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4440 return NULL_RTX;
4442 tree arg1 = CALL_EXPR_ARG (exp, 0);
4443 tree arg2 = CALL_EXPR_ARG (exp, 1);
4444 tree len = CALL_EXPR_ARG (exp, 2);
4446 /* Diagnose calls where the specified length exceeds the size of either
4447 object. */
4448 if (warn_stringop_overflow)
4450 tree size = compute_objsize (arg1, 0);
4451 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4452 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4454 size = compute_objsize (arg2, 0);
4455 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4456 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4460 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4461 location_t loc = EXPR_LOCATION (exp);
4463 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4464 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4466 /* If we don't have POINTER_TYPE, call the function. */
4467 if (arg1_align == 0 || arg2_align == 0)
4468 return NULL_RTX;
4470 rtx arg1_rtx = get_memory_rtx (arg1, len);
4471 rtx arg2_rtx = get_memory_rtx (arg2, len);
4472 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4474 /* Set MEM_SIZE as appropriate. */
4475 if (CONST_INT_P (len_rtx))
4477 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4478 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4481 by_pieces_constfn constfn = NULL;
4483 const char *src_str = c_getstr (arg2);
4484 if (result_eq && src_str == NULL)
4486 src_str = c_getstr (arg1);
4487 if (src_str != NULL)
4488 std::swap (arg1_rtx, arg2_rtx);
4491 /* If one argument is a string constant and the comparison would be
4492 done by pieces, we can avoid loading the string from memory
4493 and only store the computed constants. */
4494 if (src_str
4495 && CONST_INT_P (len_rtx)
4496 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4497 constfn = builtin_memcpy_read_str;
4499 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4500 TREE_TYPE (len), target,
4501 result_eq, constfn,
4502 CONST_CAST (char *, src_str));
4504 if (result)
4506 /* Return the value in the proper mode for this function. */
4507 if (GET_MODE (result) == mode)
4508 return result;
4510 if (target != 0)
4512 convert_move (target, result, 0);
4513 return target;
4516 return convert_to_mode (mode, result, 0);
4519 return NULL_RTX;
4522 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4523 if we failed; the caller should emit a normal call. Otherwise try to get
4524 the result in TARGET, if convenient. */
4526 static rtx
4527 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4529 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4530 return NULL_RTX;
4532 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4533 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4534 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4535 return NULL_RTX;
4537 tree arg1 = CALL_EXPR_ARG (exp, 0);
4538 tree arg2 = CALL_EXPR_ARG (exp, 1);
4540 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4541 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4543 /* If we don't have POINTER_TYPE, call the function. */
4544 if (arg1_align == 0 || arg2_align == 0)
4545 return NULL_RTX;
4547 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4548 arg1 = builtin_save_expr (arg1);
4549 arg2 = builtin_save_expr (arg2);
4551 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4552 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4554 rtx result = NULL_RTX;
4555 /* Try to call cmpstrsi. */
4556 if (cmpstr_icode != CODE_FOR_nothing)
4557 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4558 MIN (arg1_align, arg2_align));
4560 /* Try to determine at least one length and call cmpstrnsi. */
4561 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4563 tree len;
4564 rtx arg3_rtx;
4566 tree len1 = c_strlen (arg1, 1);
4567 tree len2 = c_strlen (arg2, 1);
4569 if (len1)
4570 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4571 if (len2)
4572 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4574 /* If we don't have a constant length for the first, use the length
4575 of the second, if we know it. We don't require a constant for
4576 this case; some cost analysis could be done if both are available
4577 but neither is constant. For now, assume they're equally cheap,
4578 unless one has side effects. If both strings have constant lengths,
4579 use the smaller. */
4581 if (!len1)
4582 len = len2;
4583 else if (!len2)
4584 len = len1;
4585 else if (TREE_SIDE_EFFECTS (len1))
4586 len = len2;
4587 else if (TREE_SIDE_EFFECTS (len2))
4588 len = len1;
4589 else if (TREE_CODE (len1) != INTEGER_CST)
4590 len = len2;
4591 else if (TREE_CODE (len2) != INTEGER_CST)
4592 len = len1;
4593 else if (tree_int_cst_lt (len1, len2))
4594 len = len1;
4595 else
4596 len = len2;
4598 /* If both arguments have side effects, we cannot optimize. */
4599 if (len && !TREE_SIDE_EFFECTS (len))
4601 arg3_rtx = expand_normal (len);
4602 result = expand_cmpstrn_or_cmpmem
4603 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4604 arg3_rtx, MIN (arg1_align, arg2_align));
4608 tree fndecl = get_callee_fndecl (exp);
4609 if (result)
4611 /* Check to see if the argument was declared attribute nonstring
4612 and if so, issue a warning since at this point it's not known
4613 to be nul-terminated. */
4614 maybe_warn_nonstring_arg (fndecl, exp);
4616 /* Return the value in the proper mode for this function. */
4617 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4618 if (GET_MODE (result) == mode)
4619 return result;
4620 if (target == 0)
4621 return convert_to_mode (mode, result, 0);
4622 convert_move (target, result, 0);
4623 return target;
4626 /* Expand the library call ourselves using a stabilized argument
4627 list to avoid re-evaluating the function's arguments twice. */
4628 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4629 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4630 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4631 return expand_call (fn, target, target == const0_rtx);
4634 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4635 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4636 the result in TARGET, if convenient. */
4638 static rtx
4639 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4640 ATTRIBUTE_UNUSED machine_mode mode)
4642 if (!validate_arglist (exp,
4643 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4644 return NULL_RTX;
4646 /* If c_strlen can determine an expression for one of the string
4647 lengths, and it doesn't have side effects, then emit cmpstrnsi
4648 using length MIN(strlen(string)+1, arg3). */
4649 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4650 if (cmpstrn_icode == CODE_FOR_nothing)
4651 return NULL_RTX;
4653 tree len;
4655 tree arg1 = CALL_EXPR_ARG (exp, 0);
4656 tree arg2 = CALL_EXPR_ARG (exp, 1);
4657 tree arg3 = CALL_EXPR_ARG (exp, 2);
4659 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4660 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4662 tree len1 = c_strlen (arg1, 1);
4663 tree len2 = c_strlen (arg2, 1);
4665 location_t loc = EXPR_LOCATION (exp);
4667 if (len1)
4668 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4669 if (len2)
4670 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4672 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4674 /* If we don't have a constant length for the first, use the length
4675 of the second, if we know it. If neither string is constant length,
4676 use the given length argument. We don't require a constant for
4677 this case; some cost analysis could be done if both are available
4678 but neither is constant. For now, assume they're equally cheap,
4679 unless one has side effects. If both strings have constant lengths,
4680 use the smaller. */
4682 if (!len1 && !len2)
4683 len = len3;
4684 else if (!len1)
4685 len = len2;
4686 else if (!len2)
4687 len = len1;
4688 else if (TREE_SIDE_EFFECTS (len1))
4689 len = len2;
4690 else if (TREE_SIDE_EFFECTS (len2))
4691 len = len1;
4692 else if (TREE_CODE (len1) != INTEGER_CST)
4693 len = len2;
4694 else if (TREE_CODE (len2) != INTEGER_CST)
4695 len = len1;
4696 else if (tree_int_cst_lt (len1, len2))
4697 len = len1;
4698 else
4699 len = len2;
4701 /* If we are not using the given length, we must incorporate it here.
4702 The actual new length parameter will be MIN(len,arg3) in this case. */
4703 if (len != len3)
4704 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4705 rtx arg1_rtx = get_memory_rtx (arg1, len);
4706 rtx arg2_rtx = get_memory_rtx (arg2, len);
4707 rtx arg3_rtx = expand_normal (len);
4708 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4709 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4710 MIN (arg1_align, arg2_align));
4712 tree fndecl = get_callee_fndecl (exp);
4713 if (result)
4715 /* Check to see if the argument was declared attribute nonstring
4716 and if so, issue a warning since at this point it's not known
4717 to be nul-terminated. */
4718 maybe_warn_nonstring_arg (fndecl, exp);
4720 /* Return the value in the proper mode for this function. */
4721 mode = TYPE_MODE (TREE_TYPE (exp));
4722 if (GET_MODE (result) == mode)
4723 return result;
4724 if (target == 0)
4725 return convert_to_mode (mode, result, 0);
4726 convert_move (target, result, 0);
4727 return target;
4730 /* Expand the library call ourselves using a stabilized argument
4731 list to avoid re-evaluating the function's arguments twice. */
4732 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4733 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4734 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4735 return expand_call (fn, target, target == const0_rtx);
4738 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4739 if that's convenient. */
4742 rtx expand_builtin_saveregs (void)
4744 rtx val;
4745 rtx_insn *seq;
4747 /* Don't do __builtin_saveregs more than once in a function.
4748 Save the result of the first call and reuse it. */
4749 if (saveregs_value != 0)
4750 return saveregs_value;
4752 /* When this function is called, it means that registers must be
4753 saved on entry to this function. So we migrate the call to the
4754 first insn of this function. */
4756 start_sequence ();
4758 /* Do whatever the machine needs done in this case. */
4759 val = targetm.calls.expand_builtin_saveregs ();
4761 seq = get_insns ();
4762 end_sequence ();
4764 saveregs_value = val;
4766 /* Put the insns after the NOTE that starts the function. If this
4767 is inside a start_sequence, make the outer-level insn chain current, so
4768 the code is placed at the start of the function. */
4769 push_topmost_sequence ();
4770 emit_insn_after (seq, entry_of_function ());
4771 pop_topmost_sequence ();
4773 return val;
4776 /* Expand a call to __builtin_next_arg. */
4778 static rtx
4779 expand_builtin_next_arg (void)
4781 /* Checking arguments is already done in fold_builtin_next_arg
4782 which must be called before this function. */
4783 return expand_binop (ptr_mode, add_optab,
4784 crtl->args.internal_arg_pointer,
4785 crtl->args.arg_offset_rtx,
4786 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4789 /* Make it easier for the backends by protecting the valist argument
4790 from multiple evaluations. */
4792 static tree
4793 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4795 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4797 /* The current way of determining the type of valist is completely
4798 bogus. We should have the information on the va builtin instead. */
4799 if (!vatype)
4800 vatype = targetm.fn_abi_va_list (cfun->decl);
4802 if (TREE_CODE (vatype) == ARRAY_TYPE)
4804 if (TREE_SIDE_EFFECTS (valist))
4805 valist = save_expr (valist);
4807 /* For this case, the backends will be expecting a pointer to
4808 vatype, but it's possible we've actually been given an array
4809 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4810 So fix it. */
4811 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4813 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4814 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4817 else
4819 tree pt = build_pointer_type (vatype);
4821 if (! needs_lvalue)
4823 if (! TREE_SIDE_EFFECTS (valist))
4824 return valist;
4826 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4827 TREE_SIDE_EFFECTS (valist) = 1;
4830 if (TREE_SIDE_EFFECTS (valist))
4831 valist = save_expr (valist);
4832 valist = fold_build2_loc (loc, MEM_REF,
4833 vatype, valist, build_int_cst (pt, 0));
4836 return valist;
4839 /* The "standard" definition of va_list is void*. */
4841 tree
4842 std_build_builtin_va_list (void)
4844 return ptr_type_node;
4847 /* The "standard" abi va_list is va_list_type_node. */
4849 tree
4850 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4852 return va_list_type_node;
4855 /* The "standard" type of va_list is va_list_type_node. */
4857 tree
4858 std_canonical_va_list_type (tree type)
4860 tree wtype, htype;
4862 wtype = va_list_type_node;
4863 htype = type;
4865 if (TREE_CODE (wtype) == ARRAY_TYPE)
4867 /* If va_list is an array type, the argument may have decayed
4868 to a pointer type, e.g. by being passed to another function.
4869 In that case, unwrap both types so that we can compare the
4870 underlying records. */
4871 if (TREE_CODE (htype) == ARRAY_TYPE
4872 || POINTER_TYPE_P (htype))
4874 wtype = TREE_TYPE (wtype);
4875 htype = TREE_TYPE (htype);
4878 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4879 return va_list_type_node;
4881 return NULL_TREE;
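/* Illustrative example (hypothetical, not part of this file) of the
   decay handled above on targets whose va_list is an array type:

     typedef struct tag va_list[1];   // array-type va_list
     void f (va_list ap);             // AP decays to struct tag *

   Both WTYPE and HTYPE unwrap to struct tag, so they compare equal.  */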
4884 /* The "standard" implementation of va_start: just assign `nextarg' to
4885 the variable. */
4887 void
4888 std_expand_builtin_va_start (tree valist, rtx nextarg)
4890 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4891 convert_move (va_r, nextarg, 0);
4893 /* We do not have any valid bounds for the pointer, so
4894 just store zero bounds for it. */
4895 if (chkp_function_instrumented_p (current_function_decl))
4896 chkp_expand_bounds_reset_for_mem (valist,
4897 make_tree (TREE_TYPE (valist),
4898 nextarg));
4901 /* Expand EXP, a call to __builtin_va_start. */
4903 static rtx
4904 expand_builtin_va_start (tree exp)
4906 rtx nextarg;
4907 tree valist;
4908 location_t loc = EXPR_LOCATION (exp);
4910 if (call_expr_nargs (exp) < 2)
4912 error_at (loc, "too few arguments to function %<va_start%>");
4913 return const0_rtx;
4916 if (fold_builtin_next_arg (exp, true))
4917 return const0_rtx;
4919 nextarg = expand_builtin_next_arg ();
4920 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4922 if (targetm.expand_builtin_va_start)
4923 targetm.expand_builtin_va_start (valist, nextarg);
4924 else
4925 std_expand_builtin_va_start (valist, nextarg);
4927 return const0_rtx;
4930 /* Expand EXP, a call to __builtin_va_end. */
4932 static rtx
4933 expand_builtin_va_end (tree exp)
4935 tree valist = CALL_EXPR_ARG (exp, 0);
4937 /* Evaluate for side effects, if needed. I hate macros that don't
4938 do that. */
4939 if (TREE_SIDE_EFFECTS (valist))
4940 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4942 return const0_rtx;
4945 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4946 builtin rather than just as an assignment in stdarg.h because of the
4947 nastiness of array-type va_list types. */
4949 static rtx
4950 expand_builtin_va_copy (tree exp)
4952 tree dst, src, t;
4953 location_t loc = EXPR_LOCATION (exp);
4955 dst = CALL_EXPR_ARG (exp, 0);
4956 src = CALL_EXPR_ARG (exp, 1);
4958 dst = stabilize_va_list_loc (loc, dst, 1);
4959 src = stabilize_va_list_loc (loc, src, 0);
4961 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4963 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4965 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4966 TREE_SIDE_EFFECTS (t) = 1;
4967 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4969 else
4971 rtx dstb, srcb, size;
4973 /* Evaluate to pointers. */
4974 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4975 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4976 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4977 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4979 dstb = convert_memory_address (Pmode, dstb);
4980 srcb = convert_memory_address (Pmode, srcb);
4982 /* "Dereference" to BLKmode memories. */
4983 dstb = gen_rtx_MEM (BLKmode, dstb);
4984 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4985 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4986 srcb = gen_rtx_MEM (BLKmode, srcb);
4987 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4988 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4990 /* Copy. */
4991 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4994 return const0_rtx;
4997 /* Expand a call to one of the builtin functions __builtin_frame_address or
4998 __builtin_return_address. */
5000 static rtx
5001 expand_builtin_frame_address (tree fndecl, tree exp)
5003 /* The argument must be a nonnegative integer constant.
5004 It counts the number of frames to scan up the stack.
5005 The value is either the frame pointer value or the return
5006 address saved in that frame. */
5007 if (call_expr_nargs (exp) == 0)
5008 /* Warning about missing arg was already issued. */
5009 return const0_rtx;
5010 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5012 error ("invalid argument to %qD", fndecl);
5013 return const0_rtx;
5015 else
5017 /* Number of frames to scan up the stack. */
5018 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5020 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5022 /* Some ports cannot access arbitrary stack frames. */
5023 if (tem == NULL)
5025 warning (0, "unsupported argument to %qD", fndecl);
5026 return const0_rtx;
5029 if (count)
5031 /* Warn since no effort is made to ensure that any frame
5032 beyond the current one exists or can be safely reached. */
5033 warning (OPT_Wframe_address, "calling %qD with "
5034 "a nonzero argument is unsafe", fndecl);
5037 /* For __builtin_frame_address, return what we've got. */
5038 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5039 return tem;
5041 if (!REG_P (tem)
5042 && ! CONSTANT_P (tem))
5043 tem = copy_addr_to_reg (tem);
5044 return tem;
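/* Illustrative usage (an assumed example, not part of this file):

     void *fp = __builtin_frame_address (0);   // current frame: no warning
     void *ra = __builtin_return_address (1);  // nonzero count: warned
                                               // under -Wframe-address.  */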
5048 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5049 failed and the caller should emit a normal call. */
5051 static rtx
5052 expand_builtin_alloca (tree exp)
5054 rtx op0;
5055 rtx result;
5056 unsigned int align;
5057 tree fndecl = get_callee_fndecl (exp);
5058 HOST_WIDE_INT max_size;
5059 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5060 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5061 bool valid_arglist
5062 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5063 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5064 VOID_TYPE)
5065 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5066 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5067 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5069 if (!valid_arglist)
5070 return NULL_RTX;
5072 if ((alloca_for_var && !warn_vla_limit)
5073 || (!alloca_for_var && !warn_alloca_limit))
5075 /* -Walloca-larger-than and -Wvla-larger-than settings override
5076 the more general -Walloc-size-larger-than, so unless either of
5077 the former options is specified, check the alloca arguments for
5078 overflow. */
5079 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5080 int idx[] = { 0, -1 };
5081 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5084 /* Compute the argument. */
5085 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5087 /* Compute the alignment. */
5088 align = (fcode == BUILT_IN_ALLOCA
5089 ? BIGGEST_ALIGNMENT
5090 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5092 /* Compute the maximum size. */
5093 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5094 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5095 : -1);
5097 /* Allocate the desired space. If the allocation stems from the declaration
5098 of a variable-sized object, it cannot accumulate. */
5099 result
5100 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5101 result = convert_memory_address (ptr_mode, result);
5103 return result;
5106 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5107 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx,
5108 which is the STACK_DYNAMIC_OFFSET value. See the motivation for this
5109 in the comment to the handle_builtin_stack_restore function. */
5111 static rtx
5112 expand_asan_emit_allocas_unpoison (tree exp)
5114 tree arg0 = CALL_EXPR_ARG (exp, 0);
5115 tree arg1 = CALL_EXPR_ARG (exp, 1);
5116 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5117 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5118 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5119 stack_pointer_rtx, NULL_RTX, 0,
5120 OPTAB_LIB_WIDEN);
5121 off = convert_modes (ptr_mode, Pmode, off, 0);
5122 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5123 OPTAB_LIB_WIDEN);
5124 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5125 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5126 top, ptr_mode, bot, ptr_mode);
5127 return ret;
5130 /* Expand a call to bswap builtin in EXP.
5131 Return NULL_RTX if a normal call should be emitted rather than expanding the
5132 function in-line. If convenient, the result should be placed in TARGET.
5133 SUBTARGET may be used as the target for computing one of EXP's operands. */
5135 static rtx
5136 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5137 rtx subtarget)
5139 tree arg;
5140 rtx op0;
5142 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5143 return NULL_RTX;
5145 arg = CALL_EXPR_ARG (exp, 0);
5146 op0 = expand_expr (arg,
5147 subtarget && GET_MODE (subtarget) == target_mode
5148 ? subtarget : NULL_RTX,
5149 target_mode, EXPAND_NORMAL);
5150 if (GET_MODE (op0) != target_mode)
5151 op0 = convert_to_mode (target_mode, op0, 1);
5153 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5155 gcc_assert (target);
5157 return convert_to_mode (target_mode, target, 1);
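/* Illustrative semantics (an assumed example, not part of this file):

     __builtin_bswap32 (0x11223344u) == 0x44332211u.  */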
5160 /* Expand a call to a unary builtin in EXP.
5161 Return NULL_RTX if a normal call should be emitted rather than expanding the
5162 function in-line. If convenient, the result should be placed in TARGET.
5163 SUBTARGET may be used as the target for computing one of EXP's operands. */
5165 static rtx
5166 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5167 rtx subtarget, optab op_optab)
5169 rtx op0;
5171 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5172 return NULL_RTX;
5174 /* Compute the argument. */
5175 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5176 (subtarget
5177 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5178 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5179 VOIDmode, EXPAND_NORMAL);
5180 /* Compute op, into TARGET if possible.
5181 Set TARGET to wherever the result comes back. */
5182 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5183 op_optab, op0, target, op_optab != clrsb_optab);
5184 gcc_assert (target);
5186 return convert_to_mode (target_mode, target, 0);
5189 /* Expand a call to __builtin_expect. We just return our argument
5190 as the builtin_expect semantics should already have been applied by
5191 the tree branch prediction pass. */
5193 static rtx
5194 expand_builtin_expect (tree exp, rtx target)
5196 tree arg;
5198 if (call_expr_nargs (exp) < 2)
5199 return const0_rtx;
5200 arg = CALL_EXPR_ARG (exp, 0);
5202 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5203 /* When guessing was done, the hints should already be stripped away. */
5204 gcc_assert (!flag_guess_branch_prob
5205 || optimize == 0 || seen_error ());
5206 return target;
5209 /* Expand a call to __builtin_assume_aligned. We just return our first
5210 argument as the builtin_assume_aligned semantics should already have
5211 been applied by CCP. */
5213 static rtx
5214 expand_builtin_assume_aligned (tree exp, rtx target)
5216 if (call_expr_nargs (exp) < 2)
5217 return const0_rtx;
5218 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5219 EXPAND_NORMAL);
5220 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5221 && (call_expr_nargs (exp) < 3
5222 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5223 return target;
5226 void
5227 expand_builtin_trap (void)
5229 if (targetm.have_trap ())
5231 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5232 /* For trap insns when not accumulating outgoing args force
5233 REG_ARGS_SIZE note to prevent crossjumping of calls with
5234 different args sizes. */
5235 if (!ACCUMULATE_OUTGOING_ARGS)
5236 add_args_size_note (insn, stack_pointer_delta);
5238 else
5240 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5241 tree call_expr = build_call_expr (fn, 0);
5242 expand_call (call_expr, NULL_RTX, false);
5245 emit_barrier ();
5248 /* Expand a call to __builtin_unreachable. We do nothing except emit
5249 a barrier saying that control flow will not pass here.
5251 It is the responsibility of the program being compiled to ensure
5252 that control flow never reaches __builtin_unreachable. */
5253 static void
5254 expand_builtin_unreachable (void)
5256 emit_barrier ();
5259 /* Expand EXP, a call to fabs, fabsf or fabsl.
5260 Return NULL_RTX if a normal call should be emitted rather than expanding
5261 the function inline. If convenient, the result should be placed
5262 in TARGET. SUBTARGET may be used as the target for computing
5263 the operand. */
5265 static rtx
5266 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5268 machine_mode mode;
5269 tree arg;
5270 rtx op0;
5272 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5273 return NULL_RTX;
5275 arg = CALL_EXPR_ARG (exp, 0);
5276 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5277 mode = TYPE_MODE (TREE_TYPE (arg));
5278 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5279 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5282 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5283 Return NULL if a normal call should be emitted rather than expanding the
5284 function inline. If convenient, the result should be placed in TARGET.
5285 SUBTARGET may be used as the target for computing the operand. */
5287 static rtx
5288 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5290 rtx op0, op1;
5291 tree arg;
5293 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5294 return NULL_RTX;
5296 arg = CALL_EXPR_ARG (exp, 0);
5297 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5299 arg = CALL_EXPR_ARG (exp, 1);
5300 op1 = expand_normal (arg);
5302 return expand_copysign (op0, op1, target);
5305 /* Expand a call to __builtin___clear_cache. */
5307 static rtx
5308 expand_builtin___clear_cache (tree exp)
5310 if (!targetm.code_for_clear_cache)
5312 #ifdef CLEAR_INSN_CACHE
5313 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5314 does something. Just do the default expansion to a call to
5315 __clear_cache(). */
5316 return NULL_RTX;
5317 #else
5318 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5319 does nothing. There is no need to call it. Do nothing. */
5320 return const0_rtx;
5321 #endif /* CLEAR_INSN_CACHE */
5324 /* We have a "clear_cache" insn, and it will handle everything. */
5325 tree begin, end;
5326 rtx begin_rtx, end_rtx;
5328 /* We must not expand to a library call. If we did, any
5329 fallback library function in libgcc that might contain a call to
5330 __builtin___clear_cache() would recurse infinitely. */
5331 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5333 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5334 return const0_rtx;
5337 if (targetm.have_clear_cache ())
5339 struct expand_operand ops[2];
5341 begin = CALL_EXPR_ARG (exp, 0);
5342 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5344 end = CALL_EXPR_ARG (exp, 1);
5345 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5347 create_address_operand (&ops[0], begin_rtx);
5348 create_address_operand (&ops[1], end_rtx);
5349 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5350 return const0_rtx;
5352 return const0_rtx;
5355 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5357 static rtx
5358 round_trampoline_addr (rtx tramp)
5360 rtx temp, addend, mask;
5362 /* If we don't need too much alignment, we'll have been guaranteed
5363 proper alignment by get_trampoline_type. */
5364 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5365 return tramp;
5367 /* Round address up to desired boundary. */
5368 temp = gen_reg_rtx (Pmode);
5369 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5370 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5372 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5373 temp, 0, OPTAB_LIB_WIDEN);
5374 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5375 temp, 0, OPTAB_LIB_WIDEN);
5377 return tramp;
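/* Worked example of the round-up above, assuming TRAMPOLINE_ALIGNMENT
   of 64 bits and hence an 8-byte boundary: for tramp == 0x1003,

     temp  = 0x1003 + 7   == 0x100a
     tramp = 0x100a & -8  == 0x1008

   i.e. the classic (addr + align - 1) & -align idiom.  */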
5380 static rtx
5381 expand_builtin_init_trampoline (tree exp, bool onstack)
5383 tree t_tramp, t_func, t_chain;
5384 rtx m_tramp, r_tramp, r_chain, tmp;
5386 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5387 POINTER_TYPE, VOID_TYPE))
5388 return NULL_RTX;
5390 t_tramp = CALL_EXPR_ARG (exp, 0);
5391 t_func = CALL_EXPR_ARG (exp, 1);
5392 t_chain = CALL_EXPR_ARG (exp, 2);
5394 r_tramp = expand_normal (t_tramp);
5395 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5396 MEM_NOTRAP_P (m_tramp) = 1;
5398 /* If ONSTACK, the TRAMP argument should be the address of a field
5399 within the local function's FRAME decl. Either way, let's see if
5400 we can fill in the MEM_ATTRs for this memory. */
5401 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5402 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5404 /* Creator of a heap trampoline is responsible for making sure the
5405 address is aligned to at least STACK_BOUNDARY. Normally malloc
5406 will ensure this anyhow. */
5407 tmp = round_trampoline_addr (r_tramp);
5408 if (tmp != r_tramp)
5410 m_tramp = change_address (m_tramp, BLKmode, tmp);
5411 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5412 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5415 /* The FUNC argument should be the address of the nested function.
5416 Extract the actual function decl to pass to the hook. */
5417 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5418 t_func = TREE_OPERAND (t_func, 0);
5419 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5421 r_chain = expand_normal (t_chain);
5423 /* Generate insns to initialize the trampoline. */
5424 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5426 if (onstack)
5428 trampolines_created = 1;
5430 if (targetm.calls.custom_function_descriptors != 0)
5431 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5432 "trampoline generated for nested function %qD", t_func);
5435 return const0_rtx;
5438 static rtx
5439 expand_builtin_adjust_trampoline (tree exp)
5441 rtx tramp;
5443 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5444 return NULL_RTX;
5446 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5447 tramp = round_trampoline_addr (tramp);
5448 if (targetm.calls.trampoline_adjust_address)
5449 tramp = targetm.calls.trampoline_adjust_address (tramp);
5451 return tramp;
5454 /* Expand a call to the builtin descriptor initialization routine.
5455 A descriptor is made up of a couple of pointers to the static
5456 chain and the code entry in this order. */
5458 static rtx
5459 expand_builtin_init_descriptor (tree exp)
5461 tree t_descr, t_func, t_chain;
5462 rtx m_descr, r_descr, r_func, r_chain;
5464 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5465 VOID_TYPE))
5466 return NULL_RTX;
5468 t_descr = CALL_EXPR_ARG (exp, 0);
5469 t_func = CALL_EXPR_ARG (exp, 1);
5470 t_chain = CALL_EXPR_ARG (exp, 2);
5472 r_descr = expand_normal (t_descr);
5473 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5474 MEM_NOTRAP_P (m_descr) = 1;
5476 r_func = expand_normal (t_func);
5477 r_chain = expand_normal (t_chain);
5479 /* Generate insns to initialize the descriptor. */
5480 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5481 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5482 POINTER_SIZE / BITS_PER_UNIT), r_func);
5484 return const0_rtx;
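/* Resulting descriptor layout, assuming 64-bit pointers:

     offset 0: static chain value
     offset 8: code entry point

   matching the order documented above.  */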
5487 /* Expand a call to the builtin descriptor adjustment routine. */
5489 static rtx
5490 expand_builtin_adjust_descriptor (tree exp)
5492 rtx tramp;
5494 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5495 return NULL_RTX;
5497 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5499 /* Unalign the descriptor to allow runtime identification. */
5500 tramp = plus_constant (ptr_mode, tramp,
5501 targetm.calls.custom_function_descriptors);
5503 return force_operand (tramp, NULL_RTX);
5506 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5507 function. The function first checks whether the back end provides
5508 an insn to implement signbit for the respective mode. If not, it
5509 checks whether the floating point format of the value is such that
5510 the sign bit can be extracted; failing that, it expands ARG < 0.0.
5511 EXP is the expression that is a call to the builtin function; if
5512 convenient, the result should be placed in TARGET. */
5513 static rtx
5514 expand_builtin_signbit (tree exp, rtx target)
5516 const struct real_format *fmt;
5517 scalar_float_mode fmode;
5518 scalar_int_mode rmode, imode;
5519 tree arg;
5520 int word, bitpos;
5521 enum insn_code icode;
5522 rtx temp;
5523 location_t loc = EXPR_LOCATION (exp);
5525 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5526 return NULL_RTX;
5528 arg = CALL_EXPR_ARG (exp, 0);
5529 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5530 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5531 fmt = REAL_MODE_FORMAT (fmode);
5533 arg = builtin_save_expr (arg);
5535 /* Expand the argument yielding an RTX expression. */
5536 temp = expand_normal (arg);
5538 /* Check if the back end provides an insn that handles signbit for the
5539 argument's mode. */
5540 icode = optab_handler (signbit_optab, fmode);
5541 if (icode != CODE_FOR_nothing)
5543 rtx_insn *last = get_last_insn ();
5544 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5545 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5546 return target;
5547 delete_insns_since (last);
5550 /* For floating point formats without a sign bit, implement signbit
5551 as "ARG < 0.0". */
5552 bitpos = fmt->signbit_ro;
5553 if (bitpos < 0)
5555 /* But we can't do this if the format supports signed zero. */
5556 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5558 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5559 build_real (TREE_TYPE (arg), dconst0));
5560 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5563 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5565 imode = int_mode_for_mode (fmode).require ();
5566 temp = gen_lowpart (imode, temp);
5568 else
5570 imode = word_mode;
5571 /* Handle targets with different FP word orders. */
5572 if (FLOAT_WORDS_BIG_ENDIAN)
5573 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5574 else
5575 word = bitpos / BITS_PER_WORD;
5576 temp = operand_subword_force (temp, word, fmode);
5577 bitpos = bitpos % BITS_PER_WORD;
5580 /* Force the intermediate word_mode (or narrower) result into a
5581 register. This avoids attempting to create paradoxical SUBREGs
5582 of floating point modes below. */
5583 temp = force_reg (imode, temp);
5585 /* If the bitpos is within the "result mode" lowpart, the operation
5586 can be implemented with a single bitwise AND. Otherwise, we need
5587 a right shift and an AND. */
5589 if (bitpos < GET_MODE_BITSIZE (rmode))
5591 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5593 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5594 temp = gen_lowpart (rmode, temp);
5595 temp = expand_binop (rmode, and_optab, temp,
5596 immed_wide_int_const (mask, rmode),
5597 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5599 else
5601 /* Perform a logical right shift to place the signbit in the least
5602 significant bit, then truncate the result to the desired mode
5603 and mask just this bit. */
5604 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5605 temp = gen_lowpart (rmode, temp);
5606 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5607 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5610 return temp;
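/* Worked example of the extraction above, for IEEE double on a 64-bit
   little-endian target with a 32-bit int result: signbit_ro == 63 and
   IMODE is the 64-bit integer mode, so BITPOS exceeds the result mode's
   bitsize and the shift branch emits the equivalent of

     (int) (bits >> 63) & 1

   where "bits" stands for the value reinterpreted as an integer.  */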
5613 /* Expand fork or exec calls. TARGET is the desired target of the
5614 call. EXP is the call. FN is the
5615 decl of the actual function. IGNORE is nonzero if the
5616 value is to be ignored. */
5618 static rtx
5619 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5621 tree id, decl;
5622 tree call;
5624 /* If we are not profiling, just call the function. */
5625 if (!profile_arc_flag)
5626 return NULL_RTX;
5628 /* Otherwise call the wrapper. This should be equivalent for the rest of
5629 the compiler, so the code does not diverge, and the wrapper may run the
5630 code necessary for keeping the profiling sane. */
5632 switch (DECL_FUNCTION_CODE (fn))
5634 case BUILT_IN_FORK:
5635 id = get_identifier ("__gcov_fork");
5636 break;
5638 case BUILT_IN_EXECL:
5639 id = get_identifier ("__gcov_execl");
5640 break;
5642 case BUILT_IN_EXECV:
5643 id = get_identifier ("__gcov_execv");
5644 break;
5646 case BUILT_IN_EXECLP:
5647 id = get_identifier ("__gcov_execlp");
5648 break;
5650 case BUILT_IN_EXECLE:
5651 id = get_identifier ("__gcov_execle");
5652 break;
5654 case BUILT_IN_EXECVP:
5655 id = get_identifier ("__gcov_execvp");
5656 break;
5658 case BUILT_IN_EXECVE:
5659 id = get_identifier ("__gcov_execve");
5660 break;
5662 default:
5663 gcc_unreachable ();
5666 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5667 FUNCTION_DECL, id, TREE_TYPE (fn));
5668 DECL_EXTERNAL (decl) = 1;
5669 TREE_PUBLIC (decl) = 1;
5670 DECL_ARTIFICIAL (decl) = 1;
5671 TREE_NOTHROW (decl) = 1;
5672 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5673 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5674 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5675 return expand_call (call, target, ignore);
5680 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5681 the pointer in these functions is void*, the tree optimizers may remove
5682 casts. The mode computed in expand_builtin isn't reliable either, due
5683 to __sync_bool_compare_and_swap.
5685 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5686 group of builtins. This gives us log2 of the mode size. */
5688 static inline machine_mode
5689 get_builtin_sync_mode (int fcode_diff)
5691 /* The size is not negotiable, so ask not to get BLKmode in return
5692 if the target indicates that a smaller size would be better. */
5693 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
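/* Worked example: for __sync_fetch_and_add_4, FCODE_DIFF is
   BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   giving a mode of 8 << 2 == 32 bits, i.e. SImode on typical targets.  */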
5696 /* Expand the memory expression LOC and return the appropriate memory operand
5697 for the builtin_sync operations. */
5699 static rtx
5700 get_builtin_sync_mem (tree loc, machine_mode mode)
5702 rtx addr, mem;
5704 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5705 addr = convert_memory_address (Pmode, addr);
5707 /* Note that we explicitly do not want any alias information for this
5708 memory, so that we kill all other live memories. Otherwise we don't
5709 satisfy the full barrier semantics of the intrinsic. */
5710 mem = validize_mem (gen_rtx_MEM (mode, addr));
5712 /* The memory must be aligned at least as strictly as the mode requires. */
5713 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5714 get_pointer_alignment (loc)));
5715 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5716 MEM_VOLATILE_P (mem) = 1;
5718 return mem;
5721 /* Make sure an argument is in the right mode.
5722 EXP is the tree argument.
5723 MODE is the mode it should be in. */
5725 static rtx
5726 expand_expr_force_mode (tree exp, machine_mode mode)
5728 rtx val;
5729 machine_mode old_mode;
5731 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5732 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5733 of CONST_INTs, where we know the old_mode only from the call argument. */
5735 old_mode = GET_MODE (val);
5736 if (old_mode == VOIDmode)
5737 old_mode = TYPE_MODE (TREE_TYPE (exp));
5738 val = convert_modes (mode, old_mode, val, 1);
5739 return val;
5743 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5744 EXP is the CALL_EXPR. CODE is the rtx code
5745 that corresponds to the arithmetic or logical operation from the name;
5746 an exception here is that NOT actually means NAND. TARGET is an optional
5747 place for us to store the results; AFTER is true if this is the
5748 fetch_and_xxx form. */
5750 static rtx
5751 expand_builtin_sync_operation (machine_mode mode, tree exp,
5752 enum rtx_code code, bool after,
5753 rtx target)
5755 rtx val, mem;
5756 location_t loc = EXPR_LOCATION (exp);
5758 if (code == NOT && warn_sync_nand)
5760 tree fndecl = get_callee_fndecl (exp);
5761 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5763 static bool warned_f_a_n, warned_n_a_f;
5765 switch (fcode)
5767 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5768 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5769 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5770 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5771 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5772 if (warned_f_a_n)
5773 break;
5775 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5776 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5777 warned_f_a_n = true;
5778 break;
5780 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5781 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5782 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5783 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5784 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5785 if (warned_n_a_f)
5786 break;
5788 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5789 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5790 warned_n_a_f = true;
5791 break;
5793 default:
5794 gcc_unreachable ();
5798 /* Expand the operands. */
5799 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5800 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5802 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5803 after);
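/* Illustrative semantics of the two forms handled above (with NOT
   meaning NAND, as noted):

     old = __sync_fetch_and_add (&x, n);   -- AFTER false: returns old x
     cur = __sync_add_and_fetch (&x, n);   -- AFTER true:  returns x + n

   both with full MEMMODEL_SYNC_SEQ_CST barrier semantics.  */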
5806 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5807 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5808 true if this is the boolean form. TARGET is a place for us to store the
5809 results; this is NOT optional if IS_BOOL is true. */
5811 static rtx
5812 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5813 bool is_bool, rtx target)
5815 rtx old_val, new_val, mem;
5816 rtx *pbool, *poval;
5818 /* Expand the operands. */
5819 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5820 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5821 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5823 pbool = poval = NULL;
5824 if (target != const0_rtx)
5826 if (is_bool)
5827 pbool = &target;
5828 else
5829 poval = &target;
5831 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5832 false, MEMMODEL_SYNC_SEQ_CST,
5833 MEMMODEL_SYNC_SEQ_CST))
5834 return NULL_RTX;
5836 return target;
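/* For illustration, the two user-visible forms expanded here:

     bool ok  = __sync_bool_compare_and_swap (&x, oldv, newv);
     TYPE old = __sync_val_compare_and_swap (&x, oldv, newv);

   PBOOL and POVAL select which of the two results the caller wants.  */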
5839 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5840 general form is actually an atomic exchange, and some targets only
5841 support a reduced form with the second argument being a constant 1.
5842 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5843 the results. */
5845 static rtx
5846 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5847 rtx target)
5849 rtx val, mem;
5851 /* Expand the operands. */
5852 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5853 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5855 return expand_sync_lock_test_and_set (target, mem, val);
5858 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5860 static void
5861 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5863 rtx mem;
5865 /* Expand the operands. */
5866 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5868 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5871 /* Given an integer representing an ``enum memmodel'', verify its
5872 correctness and return the memory model enum. */
5874 static enum memmodel
5875 get_memmodel (tree exp)
5877 rtx op;
5878 unsigned HOST_WIDE_INT val;
5879 source_location loc
5880 = expansion_point_location_if_in_system_header (input_location);
5882 /* If the parameter is not a constant, it's a run time value so we'll just
5883 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5884 if (TREE_CODE (exp) != INTEGER_CST)
5885 return MEMMODEL_SEQ_CST;
5887 op = expand_normal (exp);
5889 val = INTVAL (op);
5890 if (targetm.memmodel_check)
5891 val = targetm.memmodel_check (val);
5892 else if (val & ~MEMMODEL_MASK)
5894 warning_at (loc, OPT_Winvalid_memory_model,
5895 "unknown architecture specifier in memory model to builtin");
5896 return MEMMODEL_SEQ_CST;
5899 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5900 if (memmodel_base (val) >= MEMMODEL_LAST)
5902 warning_at (loc, OPT_Winvalid_memory_model,
5903 "invalid memory model argument to builtin");
5904 return MEMMODEL_SEQ_CST;
5907 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5908 be conservative and promote consume to acquire. */
5909 if (val == MEMMODEL_CONSUME)
5910 val = MEMMODEL_ACQUIRE;
5912 return (enum memmodel) val;
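/* Example of the CONSUME promotion above (PR 59448): a call such as

     v = __atomic_load_n (&p, __ATOMIC_CONSUME);

   is expanded as if __ATOMIC_ACQUIRE had been requested.  */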
5915 /* Expand the __atomic_exchange intrinsic:
5916 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5917 EXP is the CALL_EXPR.
5918 TARGET is an optional place for us to store the results. */
5920 static rtx
5921 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5923 rtx val, mem;
5924 enum memmodel model;
5926 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5928 if (!flag_inline_atomics)
5929 return NULL_RTX;
5931 /* Expand the operands. */
5932 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5933 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5935 return expand_atomic_exchange (target, mem, val, model);
5938 /* Expand the __atomic_compare_exchange intrinsic:
5939 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5940 TYPE desired, BOOL weak,
5941 enum memmodel success,
5942 enum memmodel failure)
5943 EXP is the CALL_EXPR.
5944 TARGET is an optional place for us to store the results. */
5946 static rtx
5947 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5948 rtx target)
5950 rtx expect, desired, mem, oldval;
5951 rtx_code_label *label;
5952 enum memmodel success, failure;
5953 tree weak;
5954 bool is_weak;
5955 source_location loc
5956 = expansion_point_location_if_in_system_header (input_location);
5958 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5959 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5961 if (failure > success)
5963 warning_at (loc, OPT_Winvalid_memory_model,
5964 "failure memory model cannot be stronger than success "
5965 "memory model for %<__atomic_compare_exchange%>");
5966 success = MEMMODEL_SEQ_CST;
5969 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5971 warning_at (loc, OPT_Winvalid_memory_model,
5972 "invalid failure memory model for "
5973 "%<__atomic_compare_exchange%>");
5974 failure = MEMMODEL_SEQ_CST;
5975 success = MEMMODEL_SEQ_CST;
5979 if (!flag_inline_atomics)
5980 return NULL_RTX;
5982 /* Expand the operands. */
5983 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5985 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5986 expect = convert_memory_address (Pmode, expect);
5987 expect = gen_rtx_MEM (mode, expect);
5988 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5990 weak = CALL_EXPR_ARG (exp, 3);
5991 is_weak = false;
5992 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5993 is_weak = true;
5995 if (target == const0_rtx)
5996 target = NULL;
5998 /* Lest the rtl backend create a race condition with an improper store
5999 to memory, always create a new pseudo for OLDVAL. */
6000 oldval = NULL;
6002 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6003 is_weak, success, failure))
6004 return NULL_RTX;
6006 /* Conditionally store back to EXPECT, lest we create a race condition
6007 with an improper store to memory. */
6008 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6009 the normal case where EXPECT is totally private, i.e. a register. At
6010 which point the store can be unconditional. */
6011 label = gen_label_rtx ();
6012 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6013 GET_MODE (target), 1, label);
6014 emit_move_insn (expect, oldval);
6015 emit_label (label);
6017 return target;
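/* Sketch of the semantics implemented above, as if executed atomically:

     if (*object == *expect)
       { *object = desired;  result = true; }
     else
       { *expect = *object;  result = false; }

   the store back to EXPECT being guarded by the branch emitted above.  */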
6020 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6021 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6022 call. The weak parameter must be dropped to match the expected parameter
6023 list and the expected argument changed from value to pointer to memory
6024 slot. */
6026 static void
6027 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6029 unsigned int z;
6030 vec<tree, va_gc> *vec;
6032 vec_alloc (vec, 5);
6033 vec->quick_push (gimple_call_arg (call, 0));
6034 tree expected = gimple_call_arg (call, 1);
6035 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6036 TREE_TYPE (expected));
6037 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6038 if (expd != x)
6039 emit_move_insn (x, expd);
6040 tree v = make_tree (TREE_TYPE (expected), x);
6041 vec->quick_push (build1 (ADDR_EXPR,
6042 build_pointer_type (TREE_TYPE (expected)), v));
6043 vec->quick_push (gimple_call_arg (call, 2));
6044 /* Skip the boolean weak parameter. */
6045 for (z = 4; z < 6; z++)
6046 vec->quick_push (gimple_call_arg (call, z));
6047 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6048 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6049 gcc_assert (bytes_log2 < 5);
6050 built_in_function fncode
6051 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6052 + bytes_log2);
6053 tree fndecl = builtin_decl_explicit (fncode);
6054 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6055 fndecl);
6056 tree exp = build_call_vec (boolean_type_node, fn, vec);
6057 tree lhs = gimple_call_lhs (call);
6058 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6059 if (lhs)
6061 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6062 if (GET_MODE (boolret) != mode)
6063 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6064 x = force_reg (mode, x);
6065 write_complex_part (target, boolret, true);
6066 write_complex_part (target, x, false);
6070 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6072 void
6073 expand_ifn_atomic_compare_exchange (gcall *call)
6075 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6076 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6077 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6078 rtx expect, desired, mem, oldval, boolret;
6079 enum memmodel success, failure;
6080 tree lhs;
6081 bool is_weak;
6082 source_location loc
6083 = expansion_point_location_if_in_system_header (gimple_location (call));
6085 success = get_memmodel (gimple_call_arg (call, 4));
6086 failure = get_memmodel (gimple_call_arg (call, 5));
6088 if (failure > success)
6090 warning_at (loc, OPT_Winvalid_memory_model,
6091 "failure memory model cannot be stronger than success "
6092 "memory model for %<__atomic_compare_exchange%>");
6093 success = MEMMODEL_SEQ_CST;
6096 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6098 warning_at (loc, OPT_Winvalid_memory_model,
6099 "invalid failure memory model for "
6100 "%<__atomic_compare_exchange%>");
6101 failure = MEMMODEL_SEQ_CST;
6102 success = MEMMODEL_SEQ_CST;
6105 if (!flag_inline_atomics)
6107 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6108 return;
6111 /* Expand the operands. */
6112 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6114 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6115 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6117 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6119 boolret = NULL;
6120 oldval = NULL;
6122 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6123 is_weak, success, failure))
6125 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6126 return;
6129 lhs = gimple_call_lhs (call);
6130 if (lhs)
6132 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6133 if (GET_MODE (boolret) != mode)
6134 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6135 write_complex_part (target, boolret, true);
6136 write_complex_part (target, oldval, false);
6140 /* Expand the __atomic_load intrinsic:
6141 TYPE __atomic_load (TYPE *object, enum memmodel)
6142 EXP is the CALL_EXPR.
6143 TARGET is an optional place for us to store the results. */
6145 static rtx
6146 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6148 rtx mem;
6149 enum memmodel model;
6151 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6152 if (is_mm_release (model) || is_mm_acq_rel (model))
6154 source_location loc
6155 = expansion_point_location_if_in_system_header (input_location);
6156 warning_at (loc, OPT_Winvalid_memory_model,
6157 "invalid memory model for %<__atomic_load%>");
6158 model = MEMMODEL_SEQ_CST;
6161 if (!flag_inline_atomics)
6162 return NULL_RTX;
6164 /* Expand the operand. */
6165 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6167 return expand_atomic_load (target, mem, model);
6171 /* Expand the __atomic_store intrinsic:
6172 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6173 EXP is the CALL_EXPR. */
6176 static rtx
6177 expand_builtin_atomic_store (machine_mode mode, tree exp)
6179 rtx mem, val;
6180 enum memmodel model;
6182 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6183 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6184 || is_mm_release (model)))
6186 source_location loc
6187 = expansion_point_location_if_in_system_header (input_location);
6188 warning_at (loc, OPT_Winvalid_memory_model,
6189 "invalid memory model for %<__atomic_store%>");
6190 model = MEMMODEL_SEQ_CST;
6193 if (!flag_inline_atomics)
6194 return NULL_RTX;
6196 /* Expand the operands. */
6197 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6198 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6200 return expand_atomic_store (mem, val, model, false);
6203 /* Expand the __atomic_fetch_XXX intrinsic:
6204 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6205 EXP is the CALL_EXPR.
6206 TARGET is an optional place for us to store the results.
6207 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR.
6208 FETCH_AFTER is true if returning the result of the operation.
6209 FETCH_AFTER is false if returning the value before the operation.
6210 IGNORE is true if the result is not used.
6211 EXT_CALL is the correct builtin for an external call if this cannot be
6212 resolved to an instruction sequence. */
6214 static rtx
6215 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6216 enum rtx_code code, bool fetch_after,
6217 bool ignore, enum built_in_function ext_call)
6219 rtx val, mem, ret;
6220 enum memmodel model;
6221 tree fndecl;
6222 tree addr;
6224 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6226 /* Expand the operands. */
6227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6230 /* Only try generating instructions if inlining is turned on. */
6231 if (flag_inline_atomics)
6233 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6234 if (ret)
6235 return ret;
6238 /* Return if a different routine isn't needed for the library call. */
6239 if (ext_call == BUILT_IN_NONE)
6240 return NULL_RTX;
6242 /* Change the call to the specified function. */
6243 fndecl = get_callee_fndecl (exp);
6244 addr = CALL_EXPR_FN (exp);
6245 STRIP_NOPS (addr);
6247 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6248 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6250 /* If we will emit code after the call, the call cannot be a tail call.
6251 If it is emitted as a tail call, a barrier is emitted after it, and
6252 then all trailing code is removed. */
6253 if (!ignore)
6254 CALL_EXPR_TAILCALL (exp) = 0;
6256 /* Expand the call here so we can emit trailing code. */
6257 ret = expand_call (exp, target, ignore);
6259 /* Replace the original function just in case it matters. */
6260 TREE_OPERAND (addr, 0) = fndecl;
6262 /* Then issue the arithmetic correction to return the right result. */
6263 if (!ignore)
6265 if (code == NOT)
6267 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6268 OPTAB_LIB_WIDEN);
6269 ret = expand_simple_unop (mode, NOT, ret, target, true);
6271 else
6272 ret = expand_simple_binop (mode, code, ret, val, target, true,
6273 OPTAB_LIB_WIDEN);
6275 return ret;
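/* Example of the arithmetic correction above: if __atomic_add_fetch
   cannot be inlined, the call is redirected to the library's
   __atomic_fetch_add (EXT_CALL), which returns the old value, and the
   result is fixed up as ret + val; for NAND (CODE == NOT) the fixup is
   ~(ret & val), matching the two branches just above.  */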
6278 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6280 void
6281 expand_ifn_atomic_bit_test_and (gcall *call)
6283 tree ptr = gimple_call_arg (call, 0);
6284 tree bit = gimple_call_arg (call, 1);
6285 tree flag = gimple_call_arg (call, 2);
6286 tree lhs = gimple_call_lhs (call);
6287 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6288 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6289 enum rtx_code code;
6290 optab optab;
6291 struct expand_operand ops[5];
6293 gcc_assert (flag_inline_atomics);
6295 if (gimple_call_num_args (call) == 4)
6296 model = get_memmodel (gimple_call_arg (call, 3));
6298 rtx mem = get_builtin_sync_mem (ptr, mode);
6299 rtx val = expand_expr_force_mode (bit, mode);
6301 switch (gimple_call_internal_fn (call))
6303 case IFN_ATOMIC_BIT_TEST_AND_SET:
6304 code = IOR;
6305 optab = atomic_bit_test_and_set_optab;
6306 break;
6307 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6308 code = XOR;
6309 optab = atomic_bit_test_and_complement_optab;
6310 break;
6311 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6312 code = AND;
6313 optab = atomic_bit_test_and_reset_optab;
6314 break;
6315 default:
6316 gcc_unreachable ();
6319 if (lhs == NULL_TREE)
6321 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6322 val, NULL_RTX, true, OPTAB_DIRECT);
6323 if (code == AND)
6324 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6325 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6326 return;
6329 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6330 enum insn_code icode = direct_optab_handler (optab, mode);
6331 gcc_assert (icode != CODE_FOR_nothing);
6332 create_output_operand (&ops[0], target, mode);
6333 create_fixed_operand (&ops[1], mem);
6334 create_convert_operand_to (&ops[2], val, mode, true);
6335 create_integer_operand (&ops[3], model);
6336 create_integer_operand (&ops[4], integer_onep (flag));
6337 if (maybe_expand_insn (icode, 5, ops))
6338 return;
6340 rtx bitval = val;
6341 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6342 val, NULL_RTX, true, OPTAB_DIRECT);
6343 rtx maskval = val;
6344 if (code == AND)
6345 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6346 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6347 code, model, false);
6348 if (integer_onep (flag))
6350 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6351 NULL_RTX, true, OPTAB_DIRECT);
6352 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6353 true, OPTAB_DIRECT);
6355 else
6356 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6357 OPTAB_DIRECT);
6358 if (result != target)
6359 emit_move_insn (target, result);
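/* The fallback path above computes, e.g. for IFN_ATOMIC_BIT_TEST_AND_SET
   with FLAG equal to 1, the equivalent of:

     old = __atomic_fetch_or (ptr, 1 << bit, model);
     lhs = (old >> bit) & 1;

   using a plain atomic fetch op when no direct pattern is available.  */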
6362 /* Expand an atomic clear operation.
6363 void __atomic_clear (BOOL *obj, enum memmodel)
6364 EXP is the call expression. */
6366 static rtx
6367 expand_builtin_atomic_clear (tree exp)
6369 machine_mode mode;
6370 rtx mem, ret;
6371 enum memmodel model;
6373 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6374 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6375 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6377 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6379 source_location loc
6380 = expansion_point_location_if_in_system_header (input_location);
6381 warning_at (loc, OPT_Winvalid_memory_model,
6382 "invalid memory model for %<__atomic_store%>");
6383 model = MEMMODEL_SEQ_CST;
6386 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6387 Failing that, a store is issued by __atomic_store. The only way this can
6388 fail is if the bool type is larger than a word size. Unlikely, but
6389 handle it anyway for completeness. Assume a single threaded model since
6390 there is no atomic support in this case, and no barriers are required. */
6391 ret = expand_atomic_store (mem, const0_rtx, model, true);
6392 if (!ret)
6393 emit_move_insn (mem, const0_rtx);
6394 return const0_rtx;
6397 /* Expand an atomic test_and_set operation.
6398 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6399 EXP is the call expression. */
6401 static rtx
6402 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6404 rtx mem;
6405 enum memmodel model;
6406 machine_mode mode;
6408 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6409 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6410 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6412 return expand_atomic_test_and_set (target, mem, model);
6416 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6417 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6419 static tree
6420 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6422 int size;
6423 machine_mode mode;
6424 unsigned int mode_align, type_align;
6426 if (TREE_CODE (arg0) != INTEGER_CST)
6427 return NULL_TREE;
6429 /* We need a corresponding integer mode for the access to be lock-free. */
6430 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6431 if (!int_mode_for_size (size, 0).exists (&mode))
6432 return boolean_false_node;
6434 mode_align = GET_MODE_ALIGNMENT (mode);
6436 if (TREE_CODE (arg1) == INTEGER_CST)
6438 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6440 /* Either this argument is null, or it's a fake pointer encoding
6441 the alignment of the object. */
6442 val = least_bit_hwi (val);
6443 val *= BITS_PER_UNIT;
6445 if (val == 0 || mode_align < val)
6446 type_align = mode_align;
6447 else
6448 type_align = val;
6450 else
6452 tree ttype = TREE_TYPE (arg1);
6454 /* This function is usually invoked and folded immediately by the front
6455 end before anything else has a chance to look at it. The pointer
6456 parameter at this point is usually cast to a void *, so check for that
6457 and look past the cast. */
6458 if (CONVERT_EXPR_P (arg1)
6459 && POINTER_TYPE_P (ttype)
6460 && VOID_TYPE_P (TREE_TYPE (ttype))
6461 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6462 arg1 = TREE_OPERAND (arg1, 0);
6464 ttype = TREE_TYPE (arg1);
6465 gcc_assert (POINTER_TYPE_P (ttype));
6467 /* Get the underlying type of the object. */
6468 ttype = TREE_TYPE (ttype);
6469 type_align = TYPE_ALIGN (ttype);
6472 /* If the object has smaller alignment, the lock free routines cannot
6473 be used. */
6474 if (type_align < mode_align)
6475 return boolean_false_node;
6477 /* Check if a compare_and_swap pattern exists for the mode which represents
6478 the required size. The pattern is not allowed to fail, so the existence
6479 of the pattern indicates support is present. Also require that an
6480 atomic load exists for the required size. */
6481 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6482 return boolean_true_node;
6483 else
6484 return boolean_false_node;
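/* Illustrative folds of the logic above, on a target providing 32-bit
   compare-and-swap and atomic load patterns (C is a hypothetical
   example object):

     __atomic_always_lock_free (4, 0)   -> true, using typical alignment
     __atomic_always_lock_free (4, &c)  -> false for "char c", since its
        TYPE_ALIGN falls short of the 32-bit mode alignment.  */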
6487 /* Return true if the parameters to call EXP represent an object which will
6488 always generate lock free instructions. The first argument represents the
6489 size of the object, and the second parameter is a pointer to the object
6490 itself. If NULL is passed for the object, then the result is based on
6491 typical alignment for an object of the specified size. Otherwise return
6492 false. */
6494 static rtx
6495 expand_builtin_atomic_always_lock_free (tree exp)
6497 tree size;
6498 tree arg0 = CALL_EXPR_ARG (exp, 0);
6499 tree arg1 = CALL_EXPR_ARG (exp, 1);
6501 if (TREE_CODE (arg0) != INTEGER_CST)
6503 error ("non-constant argument 1 to __atomic_always_lock_free");
6504 return const0_rtx;
6507 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6508 if (size == boolean_true_node)
6509 return const1_rtx;
6510 return const0_rtx;
6513 /* Return one or zero if it can be determined whether object ARG1 of size
6514 ARG0 is lock free on this architecture. */
6516 static tree
6517 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6519 if (!flag_inline_atomics)
6520 return NULL_TREE;
6522 /* If it isn't always lock free, don't generate a result. */
6523 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6524 return boolean_true_node;
6526 return NULL_TREE;
6529 /* Return true if the parameters to call EXP represent an object which will
6530 always generate lock free instructions. The first argument represents the
6531 size of the object, and the second parameter is a pointer to the object
6532 itself. If NULL is passed for the object, then the result is based on
6533 typical alignment for an object of the specified size. Otherwise return
6534 NULL. */
6536 static rtx
6537 expand_builtin_atomic_is_lock_free (tree exp)
6539 tree size;
6540 tree arg0 = CALL_EXPR_ARG (exp, 0);
6541 tree arg1 = CALL_EXPR_ARG (exp, 1);
6543 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6545 error ("non-integer argument 1 to __atomic_is_lock_free");
6546 return NULL_RTX;
6549 if (!flag_inline_atomics)
6550 return NULL_RTX;
6552 /* If the value is known at compile time, return the RTX for it. */
6553 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6554 if (size == boolean_true_node)
6555 return const1_rtx;
6557 return NULL_RTX;
6560 /* Expand the __atomic_thread_fence intrinsic:
6561 void __atomic_thread_fence (enum memmodel)
6562 EXP is the CALL_EXPR. */
6564 static void
6565 expand_builtin_atomic_thread_fence (tree exp)
6567 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6568 expand_mem_thread_fence (model);
6571 /* Expand the __atomic_signal_fence intrinsic:
6572 void __atomic_signal_fence (enum memmodel)
6573 EXP is the CALL_EXPR. */
6575 static void
6576 expand_builtin_atomic_signal_fence (tree exp)
6578 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6579 expand_mem_signal_fence (model);
6582 /* Expand the __sync_synchronize intrinsic. */
6584 static void
6585 expand_builtin_sync_synchronize (void)
6587 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6590 static rtx
6591 expand_builtin_thread_pointer (tree exp, rtx target)
6593 enum insn_code icode;
6594 if (!validate_arglist (exp, VOID_TYPE))
6595 return const0_rtx;
6596 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6597 if (icode != CODE_FOR_nothing)
6599 struct expand_operand op;
6600 /* If the target is not suitable, create a new target. */
6601 if (target == NULL_RTX
6602 || !REG_P (target)
6603 || GET_MODE (target) != Pmode)
6604 target = gen_reg_rtx (Pmode);
6605 create_output_operand (&op, target, Pmode);
6606 expand_insn (icode, 1, &op);
6607 return target;
6609 error ("__builtin_thread_pointer is not supported on this target");
6610 return const0_rtx;
6613 static void
6614 expand_builtin_set_thread_pointer (tree exp)
6616 enum insn_code icode;
6617 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6618 return;
6619 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6620 if (icode != CODE_FOR_nothing)
6622 struct expand_operand op;
6623 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6624 Pmode, EXPAND_NORMAL);
6625 create_input_operand (&op, val, Pmode);
6626 expand_insn (icode, 1, &op);
6627 return;
6629 error ("__builtin_set_thread_pointer is not supported on this target");
6633 /* Emit code to restore the current value of stack. */
6635 static void
6636 expand_stack_restore (tree var)
6638 rtx_insn *prev;
6639 rtx sa = expand_normal (var);
6641 sa = convert_memory_address (Pmode, sa);
6643 prev = get_last_insn ();
6644 emit_stack_restore (SAVE_BLOCK, sa);
6646 record_new_stack_level ();
6648 fixup_args_size_notes (prev, get_last_insn (), 0);
6651 /* Emit code to save the current value of stack. */
6653 static rtx
6654 expand_stack_save (void)
6656 rtx ret = NULL_RTX;
6658 emit_stack_save (SAVE_BLOCK, &ret);
6659 return ret;
6662 /* Emit code to get the openacc gang, worker or vector id or size. */
6664 static rtx
6665 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6667 const char *name;
6668 rtx fallback_retval;
6669 rtx_insn *(*gen_fn) (rtx, rtx);
6670 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6672 case BUILT_IN_GOACC_PARLEVEL_ID:
6673 name = "__builtin_goacc_parlevel_id";
6674 fallback_retval = const0_rtx;
6675 gen_fn = targetm.gen_oacc_dim_pos;
6676 break;
6677 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6678 name = "__builtin_goacc_parlevel_size";
6679 fallback_retval = const1_rtx;
6680 gen_fn = targetm.gen_oacc_dim_size;
6681 break;
6682 default:
6683 gcc_unreachable ();
6686 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6688 error ("%qs only supported in OpenACC code", name);
6689 return const0_rtx;
6692 tree arg = CALL_EXPR_ARG (exp, 0);
6693 if (TREE_CODE (arg) != INTEGER_CST)
6695 error ("non-constant argument 0 to %qs", name);
6696 return const0_rtx;
6699 int dim = TREE_INT_CST_LOW (arg);
6700 switch (dim)
6702 case GOMP_DIM_GANG:
6703 case GOMP_DIM_WORKER:
6704 case GOMP_DIM_VECTOR:
6705 break;
6706 default:
6707 error ("illegal argument 0 to %qs", name);
6708 return const0_rtx;
6711 if (ignore)
6712 return target;
6714 if (target == NULL_RTX)
6715 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6717 if (!targetm.have_oacc_dim_size ())
6719 emit_move_insn (target, fallback_retval);
6720 return target;
6723 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6724 emit_insn (gen_fn (reg, GEN_INT (dim)));
6725 if (reg != target)
6726 emit_move_insn (target, reg);
6728 return target;
6731 /* Expand an expression EXP that calls a built-in function,
6732 with result going to TARGET if that's convenient
6733 (and in mode MODE if that's convenient).
6734 SUBTARGET may be used as the target for computing one of EXP's operands.
6735 IGNORE is nonzero if the value is to be ignored. */
6737 rtx
6738 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6739 int ignore)
6741 tree fndecl = get_callee_fndecl (exp);
6742 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6743 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6744 int flags;
6746 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6747 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6749 /* When ASan is enabled, we don't want to expand some memory/string
6750 builtins and rely on libsanitizer's hooks. This allows us to avoid
6751 redundant checks and be sure that a possible overflow will be detected
6752 by ASan. */
6754 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6755 return expand_call (exp, target, ignore);
6757 /* When not optimizing, generate calls to library functions for a certain
6758 set of builtins. */
6759 if (!optimize
6760 && !called_as_built_in (fndecl)
6761 && fcode != BUILT_IN_FORK
6762 && fcode != BUILT_IN_EXECL
6763 && fcode != BUILT_IN_EXECV
6764 && fcode != BUILT_IN_EXECLP
6765 && fcode != BUILT_IN_EXECLE
6766 && fcode != BUILT_IN_EXECVP
6767 && fcode != BUILT_IN_EXECVE
6768 && !ALLOCA_FUNCTION_CODE_P (fcode)
6769 && fcode != BUILT_IN_FREE
6770 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6771 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6772 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6773 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6774 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6775 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6776 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6777 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6778 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6779 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6780 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6781 && fcode != BUILT_IN_CHKP_BNDRET)
6782 return expand_call (exp, target, ignore);
6784 /* The built-in function expanders test for target == const0_rtx
6785 to determine whether the function's result will be ignored. */
6786 if (ignore)
6787 target = const0_rtx;
6789 /* If the result of a pure or const built-in function is ignored, and
6790 none of its arguments are volatile, we can avoid expanding the
6791 built-in call and just evaluate the arguments for side-effects. */
6792 if (target == const0_rtx
6793 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6794 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6796 bool volatilep = false;
6797 tree arg;
6798 call_expr_arg_iterator iter;
6800 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6801 if (TREE_THIS_VOLATILE (arg))
6803 volatilep = true;
6804 break;
6807 if (! volatilep)
6809 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6810 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6811 return const0_rtx;
6815 /* expand_builtin_with_bounds is supposed to be used for
6816 instrumented builtin calls. */
6817 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6819 switch (fcode)
6821 CASE_FLT_FN (BUILT_IN_FABS):
6822 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6823 case BUILT_IN_FABSD32:
6824 case BUILT_IN_FABSD64:
6825 case BUILT_IN_FABSD128:
6826 target = expand_builtin_fabs (exp, target, subtarget);
6827 if (target)
6828 return target;
6829 break;
6831 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6832 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6833 target = expand_builtin_copysign (exp, target, subtarget);
6834 if (target)
6835 return target;
6836 break;
6838 /* Just do a normal library call if we were unable to fold
6839 the values. */
6840 CASE_FLT_FN (BUILT_IN_CABS):
6841 break;
6843 CASE_FLT_FN (BUILT_IN_FMA):
6844 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6845 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6846 if (target)
6847 return target;
6848 break;
6850 CASE_FLT_FN (BUILT_IN_ILOGB):
6851 if (! flag_unsafe_math_optimizations)
6852 break;
6853 gcc_fallthrough ();
6854 CASE_FLT_FN (BUILT_IN_ISINF):
6855 CASE_FLT_FN (BUILT_IN_FINITE):
6856 case BUILT_IN_ISFINITE:
6857 case BUILT_IN_ISNORMAL:
6858 target = expand_builtin_interclass_mathfn (exp, target);
6859 if (target)
6860 return target;
6861 break;
6863 CASE_FLT_FN (BUILT_IN_ICEIL):
6864 CASE_FLT_FN (BUILT_IN_LCEIL):
6865 CASE_FLT_FN (BUILT_IN_LLCEIL):
6866 CASE_FLT_FN (BUILT_IN_LFLOOR):
6867 CASE_FLT_FN (BUILT_IN_IFLOOR):
6868 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6869 target = expand_builtin_int_roundingfn (exp, target);
6870 if (target)
6871 return target;
6872 break;
6874 CASE_FLT_FN (BUILT_IN_IRINT):
6875 CASE_FLT_FN (BUILT_IN_LRINT):
6876 CASE_FLT_FN (BUILT_IN_LLRINT):
6877 CASE_FLT_FN (BUILT_IN_IROUND):
6878 CASE_FLT_FN (BUILT_IN_LROUND):
6879 CASE_FLT_FN (BUILT_IN_LLROUND):
6880 target = expand_builtin_int_roundingfn_2 (exp, target);
6881 if (target)
6882 return target;
6883 break;
6885 CASE_FLT_FN (BUILT_IN_POWI):
6886 target = expand_builtin_powi (exp, target);
6887 if (target)
6888 return target;
6889 break;
6891 CASE_FLT_FN (BUILT_IN_CEXPI):
6892 target = expand_builtin_cexpi (exp, target);
6893 gcc_assert (target);
6894 return target;
6896 CASE_FLT_FN (BUILT_IN_SIN):
6897 CASE_FLT_FN (BUILT_IN_COS):
6898 if (! flag_unsafe_math_optimizations)
6899 break;
6900 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6901 if (target)
6902 return target;
6903 break;
6905 CASE_FLT_FN (BUILT_IN_SINCOS):
6906 if (! flag_unsafe_math_optimizations)
6907 break;
6908 target = expand_builtin_sincos (exp);
6909 if (target)
6910 return target;
6911 break;
6913 case BUILT_IN_APPLY_ARGS:
6914 return expand_builtin_apply_args ();
6916 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6917 FUNCTION with a copy of the parameters described by
6918 ARGUMENTS, and ARGSIZE. It returns a block of memory
6919 allocated on the stack into which is stored all the registers
6920 that might possibly be used for returning the result of a
6921 function. ARGUMENTS is the value returned by
6922 __builtin_apply_args. ARGSIZE is the number of bytes of
6923 arguments that must be copied. ??? How should this value be
6924 computed? We'll also need a safe worst case value for varargs
6925 functions. */
6926 case BUILT_IN_APPLY:
6927 if (!validate_arglist (exp, POINTER_TYPE,
6928 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6929 && !validate_arglist (exp, REFERENCE_TYPE,
6930 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6931 return const0_rtx;
6932 else
6934 rtx ops[3];
6936 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6937 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6938 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6940 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6943 /* __builtin_return (RESULT) causes the function to return the
6944 value described by RESULT. RESULT is address of the block of
6945 memory returned by __builtin_apply. */
6946 case BUILT_IN_RETURN:
6947 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6948 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6949 return const0_rtx;
6951 case BUILT_IN_SAVEREGS:
6952 return expand_builtin_saveregs ();
6954 case BUILT_IN_VA_ARG_PACK:
6955 /* All valid uses of __builtin_va_arg_pack () are removed during
6956 inlining. */
6957 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6958 return const0_rtx;
6960 case BUILT_IN_VA_ARG_PACK_LEN:
6961 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6962 inlining. */
6963 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6964 return const0_rtx;
6966 /* Return the address of the first anonymous stack arg. */
6967 case BUILT_IN_NEXT_ARG:
6968 if (fold_builtin_next_arg (exp, false))
6969 return const0_rtx;
6970 return expand_builtin_next_arg ();
6972 case BUILT_IN_CLEAR_CACHE:
6973 target = expand_builtin___clear_cache (exp);
6974 if (target)
6975 return target;
6976 break;
6978 case BUILT_IN_CLASSIFY_TYPE:
6979 return expand_builtin_classify_type (exp);
6981 case BUILT_IN_CONSTANT_P:
6982 return const0_rtx;
6984 case BUILT_IN_FRAME_ADDRESS:
6985 case BUILT_IN_RETURN_ADDRESS:
6986 return expand_builtin_frame_address (fndecl, exp);
6988 /* Returns the address of the area where the structure is returned.
6989 0 otherwise. */
6990 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6991 if (call_expr_nargs (exp) != 0
6992 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6993 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6994 return const0_rtx;
6995 else
6996 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6998 CASE_BUILT_IN_ALLOCA:
6999 target = expand_builtin_alloca (exp);
7000 if (target)
7001 return target;
7002 break;
7004 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7005 return expand_asan_emit_allocas_unpoison (exp);
7007 case BUILT_IN_STACK_SAVE:
7008 return expand_stack_save ();
7010 case BUILT_IN_STACK_RESTORE:
7011 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7012 return const0_rtx;
7014 case BUILT_IN_BSWAP16:
7015 case BUILT_IN_BSWAP32:
7016 case BUILT_IN_BSWAP64:
7017 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7018 if (target)
7019 return target;
7020 break;
7022 CASE_INT_FN (BUILT_IN_FFS):
7023 target = expand_builtin_unop (target_mode, exp, target,
7024 subtarget, ffs_optab);
7025 if (target)
7026 return target;
7027 break;
7029 CASE_INT_FN (BUILT_IN_CLZ):
7030 target = expand_builtin_unop (target_mode, exp, target,
7031 subtarget, clz_optab);
7032 if (target)
7033 return target;
7034 break;
7036 CASE_INT_FN (BUILT_IN_CTZ):
7037 target = expand_builtin_unop (target_mode, exp, target,
7038 subtarget, ctz_optab);
7039 if (target)
7040 return target;
7041 break;
7043 CASE_INT_FN (BUILT_IN_CLRSB):
7044 target = expand_builtin_unop (target_mode, exp, target,
7045 subtarget, clrsb_optab);
7046 if (target)
7047 return target;
7048 break;
7050 CASE_INT_FN (BUILT_IN_POPCOUNT):
7051 target = expand_builtin_unop (target_mode, exp, target,
7052 subtarget, popcount_optab);
7053 if (target)
7054 return target;
7055 break;
7057 CASE_INT_FN (BUILT_IN_PARITY):
7058 target = expand_builtin_unop (target_mode, exp, target,
7059 subtarget, parity_optab);
7060 if (target)
7061 return target;
7062 break;
7064 case BUILT_IN_STRLEN:
7065 target = expand_builtin_strlen (exp, target, target_mode);
7066 if (target)
7067 return target;
7068 break;
7070 case BUILT_IN_STRCAT:
7071 target = expand_builtin_strcat (exp, target);
7072 if (target)
7073 return target;
7074 break;
7076 case BUILT_IN_STRCPY:
7077 target = expand_builtin_strcpy (exp, target);
7078 if (target)
7079 return target;
7080 break;
7082 case BUILT_IN_STRNCAT:
7083 target = expand_builtin_strncat (exp, target);
7084 if (target)
7085 return target;
7086 break;
7088 case BUILT_IN_STRNCPY:
7089 target = expand_builtin_strncpy (exp, target);
7090 if (target)
7091 return target;
7092 break;
7094 case BUILT_IN_STPCPY:
7095 target = expand_builtin_stpcpy (exp, target, mode);
7096 if (target)
7097 return target;
7098 break;
7100 case BUILT_IN_STPNCPY:
7101 target = expand_builtin_stpncpy (exp, target);
7102 if (target)
7103 return target;
7104 break;
7106 case BUILT_IN_MEMCHR:
7107 target = expand_builtin_memchr (exp, target);
7108 if (target)
7109 return target;
7110 break;
7112 case BUILT_IN_MEMCPY:
7113 target = expand_builtin_memcpy (exp, target);
7114 if (target)
7115 return target;
7116 break;
7118 case BUILT_IN_MEMMOVE:
7119 target = expand_builtin_memmove (exp, target);
7120 if (target)
7121 return target;
7122 break;
7124 case BUILT_IN_MEMPCPY:
7125 target = expand_builtin_mempcpy (exp, target);
7126 if (target)
7127 return target;
7128 break;
7130 case BUILT_IN_MEMSET:
7131 target = expand_builtin_memset (exp, target, mode);
7132 if (target)
7133 return target;
7134 break;
7136 case BUILT_IN_BZERO:
7137 target = expand_builtin_bzero (exp);
7138 if (target)
7139 return target;
7140 break;
7142 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7143 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7144 when changing it to a strcmp call. */
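/* For instance (an illustrative sketch, not code in this file), a
   source-level "strcmp (s, "ab") == 0" may reach here as
   __builtin_strcmp_eq (s, "ab", 3), with the third argument covering
   the constant string including its terminating NUL.  */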
7145 case BUILT_IN_STRCMP_EQ:
7146 target = expand_builtin_memcmp (exp, target, true);
7147 if (target)
7148 return target;
7150 /* Change this call back to a BUILT_IN_STRCMP. */
7151 TREE_OPERAND (exp, 1)
7152 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7154 /* Delete the last parameter. */
7155 unsigned int i;
7156 vec<tree, va_gc> *arg_vec;
7157 vec_alloc (arg_vec, 2);
7158 for (i = 0; i < 2; i++)
7159 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7160 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7161 /* FALLTHROUGH */
7163 case BUILT_IN_STRCMP:
7164 target = expand_builtin_strcmp (exp, target);
7165 if (target)
7166 return target;
7167 break;
7169 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7170 back to a BUILT_IN_STRNCMP. */
7171 case BUILT_IN_STRNCMP_EQ:
7172 target = expand_builtin_memcmp (exp, target, true);
7173 if (target)
7174 return target;
7176 /* Change it back to a BUILT_IN_STRNCMP. */
7177 TREE_OPERAND (exp, 1)
7178 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7179 /* FALLTHROUGH */
7181 case BUILT_IN_STRNCMP:
7182 target = expand_builtin_strncmp (exp, target, mode);
7183 if (target)
7184 return target;
7185 break;
7187 case BUILT_IN_BCMP:
7188 case BUILT_IN_MEMCMP:
7189 case BUILT_IN_MEMCMP_EQ:
7190 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7191 if (target)
7192 return target;
7193 if (fcode == BUILT_IN_MEMCMP_EQ)
7195 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7196 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7198 break;
7200 case BUILT_IN_SETJMP:
7201 /* This should have been lowered to the builtins below. */
7202 gcc_unreachable ();
7204 case BUILT_IN_SETJMP_SETUP:
7205 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7206 and the receiver label. */
7207 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7209 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7210 VOIDmode, EXPAND_NORMAL);
7211 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7212 rtx_insn *label_r = label_rtx (label);
7214 /* This is copied from the handling of non-local gotos. */
7215 expand_builtin_setjmp_setup (buf_addr, label_r);
7216 nonlocal_goto_handler_labels
7217 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7218 nonlocal_goto_handler_labels);
7219 /* ??? Do not let expand_label treat us as such since we would
7220 not want to be both on the list of non-local labels and on
7221 the list of forced labels. */
7222 FORCED_LABEL (label) = 0;
7223 return const0_rtx;
7225 break;
7227 case BUILT_IN_SETJMP_RECEIVER:
7228 /* __builtin_setjmp_receiver is passed the receiver label. */
7229 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7231 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7232 rtx_insn *label_r = label_rtx (label);
7234 expand_builtin_setjmp_receiver (label_r);
7235 return const0_rtx;
7237 break;
7239 /* __builtin_longjmp is passed a pointer to an array of five words.
7240 It's similar to the C library longjmp function but works with
7241 __builtin_setjmp above. */
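/* A minimal usage sketch (user code, not part of this file):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   Any second argument other than the constant 1 is diagnosed below.  */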
7242 case BUILT_IN_LONGJMP:
7243 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7245 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7246 VOIDmode, EXPAND_NORMAL);
7247 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7249 if (value != const1_rtx)
7251 error ("%<__builtin_longjmp%> second argument must be 1");
7252 return const0_rtx;
7255 expand_builtin_longjmp (buf_addr, value);
7256 return const0_rtx;
7258 break;
7260 case BUILT_IN_NONLOCAL_GOTO:
7261 target = expand_builtin_nonlocal_goto (exp);
7262 if (target)
7263 return target;
7264 break;
7266 /* This updates the setjmp buffer that is its argument with the value
7267 of the current stack pointer. */
7268 case BUILT_IN_UPDATE_SETJMP_BUF:
7269 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7271 rtx buf_addr
7272 = expand_normal (CALL_EXPR_ARG (exp, 0));
7274 expand_builtin_update_setjmp_buf (buf_addr);
7275 return const0_rtx;
7277 break;
7279 case BUILT_IN_TRAP:
7280 expand_builtin_trap ();
7281 return const0_rtx;
7283 case BUILT_IN_UNREACHABLE:
7284 expand_builtin_unreachable ();
7285 return const0_rtx;
7287 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7288 case BUILT_IN_SIGNBITD32:
7289 case BUILT_IN_SIGNBITD64:
7290 case BUILT_IN_SIGNBITD128:
7291 target = expand_builtin_signbit (exp, target);
7292 if (target)
7293 return target;
7294 break;
7296 /* Various hooks for the DWARF 2 __throw routine. */
7297 case BUILT_IN_UNWIND_INIT:
7298 expand_builtin_unwind_init ();
7299 return const0_rtx;
7300 case BUILT_IN_DWARF_CFA:
7301 return virtual_cfa_rtx;
7302 #ifdef DWARF2_UNWIND_INFO
7303 case BUILT_IN_DWARF_SP_COLUMN:
7304 return expand_builtin_dwarf_sp_column ();
7305 case BUILT_IN_INIT_DWARF_REG_SIZES:
7306 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7307 return const0_rtx;
7308 #endif
7309 case BUILT_IN_FROB_RETURN_ADDR:
7310 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7311 case BUILT_IN_EXTRACT_RETURN_ADDR:
7312 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7313 case BUILT_IN_EH_RETURN:
7314 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7315 CALL_EXPR_ARG (exp, 1));
7316 return const0_rtx;
7317 case BUILT_IN_EH_RETURN_DATA_REGNO:
7318 return expand_builtin_eh_return_data_regno (exp);
7319 case BUILT_IN_EXTEND_POINTER:
7320 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7321 case BUILT_IN_EH_POINTER:
7322 return expand_builtin_eh_pointer (exp);
7323 case BUILT_IN_EH_FILTER:
7324 return expand_builtin_eh_filter (exp);
7325 case BUILT_IN_EH_COPY_VALUES:
7326 return expand_builtin_eh_copy_values (exp);
7328 case BUILT_IN_VA_START:
7329 return expand_builtin_va_start (exp);
7330 case BUILT_IN_VA_END:
7331 return expand_builtin_va_end (exp);
7332 case BUILT_IN_VA_COPY:
7333 return expand_builtin_va_copy (exp);
7334 case BUILT_IN_EXPECT:
7335 return expand_builtin_expect (exp, target);
7336 case BUILT_IN_ASSUME_ALIGNED:
7337 return expand_builtin_assume_aligned (exp, target);
7338 case BUILT_IN_PREFETCH:
7339 expand_builtin_prefetch (exp);
7340 return const0_rtx;
7342 case BUILT_IN_INIT_TRAMPOLINE:
7343 return expand_builtin_init_trampoline (exp, true);
7344 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7345 return expand_builtin_init_trampoline (exp, false);
7346 case BUILT_IN_ADJUST_TRAMPOLINE:
7347 return expand_builtin_adjust_trampoline (exp);
7349 case BUILT_IN_INIT_DESCRIPTOR:
7350 return expand_builtin_init_descriptor (exp);
7351 case BUILT_IN_ADJUST_DESCRIPTOR:
7352 return expand_builtin_adjust_descriptor (exp);
7354 case BUILT_IN_FORK:
7355 case BUILT_IN_EXECL:
7356 case BUILT_IN_EXECV:
7357 case BUILT_IN_EXECLP:
7358 case BUILT_IN_EXECLE:
7359 case BUILT_IN_EXECVP:
7360 case BUILT_IN_EXECVE:
7361 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7362 if (target)
7363 return target;
7364 break;
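/* The __sync builtins below come in _1/_2/_4/_8/_16 variants selecting
   the access size in bytes.  As a behavioral sketch (user code, not
   part of this file):

     int oldval = __sync_fetch_and_add (&counter, 1);   (returns the old value)
     int newval = __sync_add_and_fetch (&counter, 1);   (returns the new value)  */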
7366 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7367 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7368 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7369 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7370 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7371 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7372 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7373 if (target)
7374 return target;
7375 break;
7377 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7378 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7379 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7380 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7381 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7383 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7384 if (target)
7385 return target;
7386 break;
7388 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7389 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7390 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7391 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7392 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7394 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7395 if (target)
7396 return target;
7397 break;
7399 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7400 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7401 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7402 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7403 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7405 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7406 if (target)
7407 return target;
7408 break;
7410 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7411 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7412 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7413 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7414 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7416 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7417 if (target)
7418 return target;
7419 break;
7421 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7422 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7423 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7424 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7425 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7427 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7428 if (target)
7429 return target;
7430 break;
7432 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7433 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7434 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7435 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7436 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7438 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7439 if (target)
7440 return target;
7441 break;
7443 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7444 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7445 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7446 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7447 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7449 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7450 if (target)
7451 return target;
7452 break;
7454 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7455 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7456 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7457 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7458 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7460 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7461 if (target)
7462 return target;
7463 break;
7465 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7466 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7467 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7468 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7469 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7471 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7472 if (target)
7473 return target;
7474 break;
7476 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7477 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7478 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7479 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7480 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7482 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7483 if (target)
7484 return target;
7485 break;
7487 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7488 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7489 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7490 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7491 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7493 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7494 if (target)
7495 return target;
7496 break;
7498 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7501 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7503 if (mode == VOIDmode)
7504 mode = TYPE_MODE (boolean_type_node);
7505 if (!target || !register_operand (target, mode))
7506 target = gen_reg_rtx (mode);
7508 mode = get_builtin_sync_mode
7509 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7510 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7511 if (target)
7512 return target;
7513 break;
7515 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7518 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7520 mode = get_builtin_sync_mode
7521 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7522 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7523 if (target)
7524 return target;
7525 break;
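/* A behavioral sketch of the two compare-and-swap flavors (user code,
   not part of this file):

     done = __sync_bool_compare_and_swap (&x, oldval, newval);
       (true iff the swap happened)
     prev = __sync_val_compare_and_swap (&x, oldval, newval);
       (the prior contents of x, swapped or not)  */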
7527 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7530 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7533 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7534 if (target)
7535 return target;
7536 break;
7538 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7539 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7540 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7541 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7542 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7544 expand_builtin_sync_lock_release (mode, exp);
7545 return const0_rtx;
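/* A usage sketch for the two lock primitives above (user code, not
   part of this file):

     while (__sync_lock_test_and_set (&lock, 1))   (acquire barrier)
       ;
     ... critical section ...
     __sync_lock_release (&lock);   (release barrier; stores 0)  */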
7547 case BUILT_IN_SYNC_SYNCHRONIZE:
7548 expand_builtin_sync_synchronize ();
7549 return const0_rtx;
7551 case BUILT_IN_ATOMIC_EXCHANGE_1:
7552 case BUILT_IN_ATOMIC_EXCHANGE_2:
7553 case BUILT_IN_ATOMIC_EXCHANGE_4:
7554 case BUILT_IN_ATOMIC_EXCHANGE_8:
7555 case BUILT_IN_ATOMIC_EXCHANGE_16:
7556 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7557 target = expand_builtin_atomic_exchange (mode, exp, target);
7558 if (target)
7559 return target;
7560 break;
7562 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7568 unsigned int nargs, z;
7569 vec<tree, va_gc> *vec;
7571 mode =
7572 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7573 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7574 if (target)
7575 return target;
7577 /* If this is turned into an external library call, the weak parameter
7578 must be dropped to match the expected parameter list. */
7579 nargs = call_expr_nargs (exp);
7580 vec_alloc (vec, nargs - 1);
7581 for (z = 0; z < 3; z++)
7582 vec->quick_push (CALL_EXPR_ARG (exp, z));
7583 /* Skip the boolean weak parameter. */
7584 for (z = 4; z < 6; z++)
7585 vec->quick_push (CALL_EXPR_ARG (exp, z));
7586 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7587 break;
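/* For example (an illustrative sketch), the six-argument builtin call

     __atomic_compare_exchange_4 (ptr, expected, desired,
				  weak, success_order, failure_order)

   is rebuilt above as the matching five-argument libatomic entry point
   with the WEAK flag (argument index 3) removed:

     __atomic_compare_exchange_4 (ptr, expected, desired,
				  success_order, failure_order)  */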
7590 case BUILT_IN_ATOMIC_LOAD_1:
7591 case BUILT_IN_ATOMIC_LOAD_2:
7592 case BUILT_IN_ATOMIC_LOAD_4:
7593 case BUILT_IN_ATOMIC_LOAD_8:
7594 case BUILT_IN_ATOMIC_LOAD_16:
7595 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7596 target = expand_builtin_atomic_load (mode, exp, target);
7597 if (target)
7598 return target;
7599 break;
7601 case BUILT_IN_ATOMIC_STORE_1:
7602 case BUILT_IN_ATOMIC_STORE_2:
7603 case BUILT_IN_ATOMIC_STORE_4:
7604 case BUILT_IN_ATOMIC_STORE_8:
7605 case BUILT_IN_ATOMIC_STORE_16:
7606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7607 target = expand_builtin_atomic_store (mode, exp);
7608 if (target)
7609 return const0_rtx;
7610 break;
7612 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7613 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7614 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7615 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7616 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7618 enum built_in_function lib;
7619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7621 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7622 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7623 ignore, lib);
7624 if (target)
7625 return target;
7626 break;
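/* __atomic_add_fetch and the remaining op-then-fetch cases below have
   no direct libatomic counterpart, so LIB names the matching
   __atomic_fetch_* entry point; expand_builtin_atomic_fetch_op can
   emit that call and re-apply the operation to its result.  A sketch
   of the intent (illustrative, not code in this file):

     newval = __atomic_add_fetch (p, v, order);
       (may become:  newval = __atomic_fetch_add (p, v, order) + v)  */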
7628 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7629 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7630 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7631 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7632 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7634 enum built_in_function lib;
7635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7637 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7638 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7639 ignore, lib);
7640 if (target)
7641 return target;
7642 break;
7644 case BUILT_IN_ATOMIC_AND_FETCH_1:
7645 case BUILT_IN_ATOMIC_AND_FETCH_2:
7646 case BUILT_IN_ATOMIC_AND_FETCH_4:
7647 case BUILT_IN_ATOMIC_AND_FETCH_8:
7648 case BUILT_IN_ATOMIC_AND_FETCH_16:
7650 enum built_in_function lib;
7651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7653 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7654 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7655 ignore, lib);
7656 if (target)
7657 return target;
7658 break;
7660 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7661 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7662 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7663 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7664 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7666 enum built_in_function lib;
7667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7669 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7670 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7671 ignore, lib);
7672 if (target)
7673 return target;
7674 break;
7676 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7677 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7678 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7679 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7680 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7682 enum built_in_function lib;
7683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7685 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7686 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7687 ignore, lib);
7688 if (target)
7689 return target;
7690 break;
7692 case BUILT_IN_ATOMIC_OR_FETCH_1:
7693 case BUILT_IN_ATOMIC_OR_FETCH_2:
7694 case BUILT_IN_ATOMIC_OR_FETCH_4:
7695 case BUILT_IN_ATOMIC_OR_FETCH_8:
7696 case BUILT_IN_ATOMIC_OR_FETCH_16:
7698 enum built_in_function lib;
7699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7700 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7701 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7702 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7703 ignore, lib);
7704 if (target)
7705 return target;
7706 break;
7708 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7709 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7710 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7711 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7712 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7713 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7714 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7715 ignore, BUILT_IN_NONE);
7716 if (target)
7717 return target;
7718 break;
7720 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7721 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7722 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7723 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7724 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7726 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7727 ignore, BUILT_IN_NONE);
7728 if (target)
7729 return target;
7730 break;
7732 case BUILT_IN_ATOMIC_FETCH_AND_1:
7733 case BUILT_IN_ATOMIC_FETCH_AND_2:
7734 case BUILT_IN_ATOMIC_FETCH_AND_4:
7735 case BUILT_IN_ATOMIC_FETCH_AND_8:
7736 case BUILT_IN_ATOMIC_FETCH_AND_16:
7737 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7738 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7739 ignore, BUILT_IN_NONE);
7740 if (target)
7741 return target;
7742 break;
7744 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7745 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7746 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7747 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7748 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7750 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7751 ignore, BUILT_IN_NONE);
7752 if (target)
7753 return target;
7754 break;
7756 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7757 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7758 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7759 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7760 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7761 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7762 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7763 ignore, BUILT_IN_NONE);
7764 if (target)
7765 return target;
7766 break;
7768 case BUILT_IN_ATOMIC_FETCH_OR_1:
7769 case BUILT_IN_ATOMIC_FETCH_OR_2:
7770 case BUILT_IN_ATOMIC_FETCH_OR_4:
7771 case BUILT_IN_ATOMIC_FETCH_OR_8:
7772 case BUILT_IN_ATOMIC_FETCH_OR_16:
7773 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7774 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7775 ignore, BUILT_IN_NONE);
7776 if (target)
7777 return target;
7778 break;
7780 case BUILT_IN_ATOMIC_TEST_AND_SET:
7781 return expand_builtin_atomic_test_and_set (exp, target);
7783 case BUILT_IN_ATOMIC_CLEAR:
7784 return expand_builtin_atomic_clear (exp);
7786 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7787 return expand_builtin_atomic_always_lock_free (exp);
7789 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7790 target = expand_builtin_atomic_is_lock_free (exp);
7791 if (target)
7792 return target;
7793 break;
7795 case BUILT_IN_ATOMIC_THREAD_FENCE:
7796 expand_builtin_atomic_thread_fence (exp);
7797 return const0_rtx;
7799 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7800 expand_builtin_atomic_signal_fence (exp);
7801 return const0_rtx;
7803 case BUILT_IN_OBJECT_SIZE:
7804 return expand_builtin_object_size (exp);
7806 case BUILT_IN_MEMCPY_CHK:
7807 case BUILT_IN_MEMPCPY_CHK:
7808 case BUILT_IN_MEMMOVE_CHK:
7809 case BUILT_IN_MEMSET_CHK:
7810 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7811 if (target)
7812 return target;
7813 break;
7815 case BUILT_IN_STRCPY_CHK:
7816 case BUILT_IN_STPCPY_CHK:
7817 case BUILT_IN_STRNCPY_CHK:
7818 case BUILT_IN_STPNCPY_CHK:
7819 case BUILT_IN_STRCAT_CHK:
7820 case BUILT_IN_STRNCAT_CHK:
7821 case BUILT_IN_SNPRINTF_CHK:
7822 case BUILT_IN_VSNPRINTF_CHK:
7823 maybe_emit_chk_warning (exp, fcode);
7824 break;
7826 case BUILT_IN_SPRINTF_CHK:
7827 case BUILT_IN_VSPRINTF_CHK:
7828 maybe_emit_sprintf_chk_warning (exp, fcode);
7829 break;
7831 case BUILT_IN_FREE:
7832 if (warn_free_nonheap_object)
7833 maybe_emit_free_warning (exp);
7834 break;
7836 case BUILT_IN_THREAD_POINTER:
7837 return expand_builtin_thread_pointer (exp, target);
7839 case BUILT_IN_SET_THREAD_POINTER:
7840 expand_builtin_set_thread_pointer (exp);
7841 return const0_rtx;
7843 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7844 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7845 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7846 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7847 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7848 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7849 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7850 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7851 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7852 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7853 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7854 /* We allow user CHKP builtins if the Pointer Bounds
7855 Checker is off. */
7856 if (!chkp_function_instrumented_p (current_function_decl))
7858 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7859 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7860 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7861 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7862 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7863 return expand_normal (CALL_EXPR_ARG (exp, 0));
7864 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7865 return expand_normal (size_zero_node);
7866 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7867 return expand_normal (size_int (-1));
7868 else
7869 return const0_rtx;
7871 /* FALLTHROUGH */
7873 case BUILT_IN_CHKP_BNDMK:
7874 case BUILT_IN_CHKP_BNDSTX:
7875 case BUILT_IN_CHKP_BNDCL:
7876 case BUILT_IN_CHKP_BNDCU:
7877 case BUILT_IN_CHKP_BNDLDX:
7878 case BUILT_IN_CHKP_BNDRET:
7879 case BUILT_IN_CHKP_INTERSECT:
7880 case BUILT_IN_CHKP_NARROW:
7881 case BUILT_IN_CHKP_EXTRACT_LOWER:
7882 case BUILT_IN_CHKP_EXTRACT_UPPER:
7883 /* Software implementation of Pointer Bounds Checker is NYI.
7884 Target support is required. */
7885 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7886 break;
7888 case BUILT_IN_ACC_ON_DEVICE:
7889 /* Do a library call if we failed to expand the builtin when
7890 folding. */
7891 break;
7893 case BUILT_IN_GOACC_PARLEVEL_ID:
7894 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7895 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7897 default: /* Just do a library call for any unknown builtin. */
7898 break;
7901 /* The switch statement above can drop through to cause the function
7902 to be called normally. */
7903 return expand_call (exp, target, ignore);
7906 /* Similar to expand_builtin, but used for instrumented calls. */
7909 expand_builtin_with_bounds (tree exp, rtx target,
7910 rtx subtarget ATTRIBUTE_UNUSED,
7911 machine_mode mode, int ignore)
7913 tree fndecl = get_callee_fndecl (exp);
7914 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7916 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7918 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7919 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7921 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7922 && fcode < END_CHKP_BUILTINS);
7924 switch (fcode)
7926 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7927 target = expand_builtin_memcpy_with_bounds (exp, target);
7928 if (target)
7929 return target;
7930 break;
7932 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7933 target = expand_builtin_mempcpy_with_bounds (exp, target);
7934 if (target)
7935 return target;
7936 break;
7938 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7939 target = expand_builtin_memset_with_bounds (exp, target, mode);
7940 if (target)
7941 return target;
7942 break;
7944 case BUILT_IN_MEMCPY_CHKP:
7945 case BUILT_IN_MEMMOVE_CHKP:
7946 case BUILT_IN_MEMPCPY_CHKP:
7947 if (call_expr_nargs (exp) > 3)
7949 /* memcpy_chkp (void *dst, size_t dstbnd,
7950 const void *src, size_t srcbnd, size_t n)
7951 and others take a pointer bound argument just after each
7952 pointer argument. */
7953 tree dest = CALL_EXPR_ARG (exp, 0);
7954 tree src = CALL_EXPR_ARG (exp, 2);
7955 tree len = CALL_EXPR_ARG (exp, 4);
7957 check_memop_access (exp, dest, src, len);
7958 break;
7961 default:
7962 break;
7965 /* The switch statement above can drop through to cause the function
7966 to be called normally. */
7967 return expand_call (exp, target, ignore);
7970 /* Determine whether a tree node represents a call to a built-in
7971 function. If the tree T is a call to a built-in function with
7972 the right number of arguments of the appropriate types, return
7973 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7974 Otherwise the return value is END_BUILTINS. */
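/* For instance, a CALL_EXPR invoking sqrt with a single REAL_TYPE
   argument yields BUILT_IN_SQRT, while a sqrt call whose argument is,
   say, a pointer fails the parameter walk below and yields
   END_BUILTINS.  (Illustrative example, not code in this file.)  */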
7976 enum built_in_function
7977 builtin_mathfn_code (const_tree t)
7979 const_tree fndecl, arg, parmlist;
7980 const_tree argtype, parmtype;
7981 const_call_expr_arg_iterator iter;
7983 if (TREE_CODE (t) != CALL_EXPR)
7984 return END_BUILTINS;
7986 fndecl = get_callee_fndecl (t);
7987 if (fndecl == NULL_TREE
7988 || TREE_CODE (fndecl) != FUNCTION_DECL
7989 || ! DECL_BUILT_IN (fndecl)
7990 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7991 return END_BUILTINS;
7993 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7994 init_const_call_expr_arg_iterator (t, &iter);
7995 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7997 /* If a function doesn't take a variable number of arguments,
7998 the last element in the list will have type `void'. */
7999 parmtype = TREE_VALUE (parmlist);
8000 if (VOID_TYPE_P (parmtype))
8002 if (more_const_call_expr_args_p (&iter))
8003 return END_BUILTINS;
8004 return DECL_FUNCTION_CODE (fndecl);
8007 if (! more_const_call_expr_args_p (&iter))
8008 return END_BUILTINS;
8010 arg = next_const_call_expr_arg (&iter);
8011 argtype = TREE_TYPE (arg);
8013 if (SCALAR_FLOAT_TYPE_P (parmtype))
8015 if (! SCALAR_FLOAT_TYPE_P (argtype))
8016 return END_BUILTINS;
8018 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8020 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8021 return END_BUILTINS;
8023 else if (POINTER_TYPE_P (parmtype))
8025 if (! POINTER_TYPE_P (argtype))
8026 return END_BUILTINS;
8028 else if (INTEGRAL_TYPE_P (parmtype))
8030 if (! INTEGRAL_TYPE_P (argtype))
8031 return END_BUILTINS;
8033 else
8034 return END_BUILTINS;
8037 /* Variable-length argument list. */
8038 return DECL_FUNCTION_CODE (fndecl);
8041 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8042 evaluate to a constant. */
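/* For example, __builtin_constant_p (3 * 7) folds to 1 here (the
   argument is already an INTEGER_CST by this point), while
   __builtin_constant_p (x) for an ordinary integer variable X yields
   NULL_TREE so that later folding may still prove it constant, unless
   a definite answer is required now.  (Illustrative sketch.)  */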
8044 static tree
8045 fold_builtin_constant_p (tree arg)
8047 /* We return 1 for a numeric type that's known to be a constant
8048 value at compile-time or for an aggregate type that's a
8049 literal constant. */
8050 STRIP_NOPS (arg);
8052 /* If we know this is a constant, return the constant one. */
8053 if (CONSTANT_CLASS_P (arg)
8054 || (TREE_CODE (arg) == CONSTRUCTOR
8055 && TREE_CONSTANT (arg)))
8056 return integer_one_node;
8057 if (TREE_CODE (arg) == ADDR_EXPR)
8059 tree op = TREE_OPERAND (arg, 0);
8060 if (TREE_CODE (op) == STRING_CST
8061 || (TREE_CODE (op) == ARRAY_REF
8062 && integer_zerop (TREE_OPERAND (op, 1))
8063 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8064 return integer_one_node;
8067 /* If this expression has side effects, show we don't know it to be a
8068 constant. Likewise if it's a pointer or aggregate type, since in
8069 those cases we only want literals; those are only optimized
8070 when generating RTL, not later.
8071 And finally, if we are compiling an initializer, not code, we
8072 need to return a definite result now; there's not going to be any
8073 more optimization done. */
8074 if (TREE_SIDE_EFFECTS (arg)
8075 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8076 || POINTER_TYPE_P (TREE_TYPE (arg))
8077 || cfun == 0
8078 || folding_initializer
8079 || force_folding_builtin_constant_p)
8080 return integer_zero_node;
8082 return NULL_TREE;
8085 /* Create a call to builtin_expect with PRED and EXPECTED (and, when
8086 non-NULL, PREDICTOR) as its arguments and return it as a truthvalue. */
8088 static tree
8089 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8090 tree predictor)
8092 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8094 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8095 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8096 ret_type = TREE_TYPE (TREE_TYPE (fn));
8097 pred_type = TREE_VALUE (arg_types);
8098 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8100 pred = fold_convert_loc (loc, pred_type, pred);
8101 expected = fold_convert_loc (loc, expected_type, expected);
8102 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8103 predictor);
8105 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8106 build_int_cst (ret_type, 0));
8109 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
8110 Return NULL_TREE if no simplification is possible. */
8112 tree
8113 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8115 tree inner, fndecl, inner_arg0;
8116 enum tree_code code;
8118 /* Distribute the expected value over short-circuiting operators.
8119 See through the cast from truthvalue_type_node to long. */
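/* E.g. __builtin_expect (a && b, 1) distributes into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that each arm of the short-circuit carries the prediction
   (an illustrative sketch of the transformation below).  */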
8120 inner_arg0 = arg0;
8121 while (CONVERT_EXPR_P (inner_arg0)
8122 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8123 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8124 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8126 /* If this is a builtin_expect within a builtin_expect, keep the
8127 inner one. See through a comparison against a constant. It
8128 might have been added to create a truthvalue. */
8129 inner = inner_arg0;
8131 if (COMPARISON_CLASS_P (inner)
8132 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8133 inner = TREE_OPERAND (inner, 0);
8135 if (TREE_CODE (inner) == CALL_EXPR
8136 && (fndecl = get_callee_fndecl (inner))
8137 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8138 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8139 return arg0;
8141 inner = inner_arg0;
8142 code = TREE_CODE (inner);
8143 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8145 tree op0 = TREE_OPERAND (inner, 0);
8146 tree op1 = TREE_OPERAND (inner, 1);
8147 arg1 = save_expr (arg1);
8149 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8150 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8151 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8153 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8156 /* If the argument isn't invariant then there's nothing else we can do. */
8157 if (!TREE_CONSTANT (inner_arg0))
8158 return NULL_TREE;
8160 /* If we expect that a comparison against the argument will fold to
8161 a constant, return the constant. In practice, this means a true
8162 constant or the address of a non-weak symbol. */
8163 inner = inner_arg0;
8164 STRIP_NOPS (inner);
8165 if (TREE_CODE (inner) == ADDR_EXPR)
8169 inner = TREE_OPERAND (inner, 0);
8171 while (TREE_CODE (inner) == COMPONENT_REF
8172 || TREE_CODE (inner) == ARRAY_REF);
8173 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8174 return NULL_TREE;
8177 /* Otherwise, ARG0 already has the proper type for the return value. */
8178 return arg0;
8181 /* Fold a call to __builtin_classify_type with argument ARG. */
8183 static tree
8184 fold_builtin_classify_type (tree arg)
8186 if (arg == 0)
8187 return build_int_cst (integer_type_node, no_type_class);
8189 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8192 /* Fold a call to __builtin_strlen with argument ARG. */
8194 static tree
8195 fold_builtin_strlen (location_t loc, tree type, tree arg)
8197 if (!validate_arg (arg, POINTER_TYPE))
8198 return NULL_TREE;
8199 else
8201 tree len = c_strlen (arg, 0);
8203 if (len)
8204 return fold_convert_loc (loc, type, len);
8206 return NULL_TREE;
8210 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8212 static tree
8213 fold_builtin_inf (location_t loc, tree type, int warn)
8215 REAL_VALUE_TYPE real;
8217 /* __builtin_inff is intended to be usable to define INFINITY on all
8218 targets. If an infinity is not available, INFINITY expands "to a
8219 positive constant of type float that overflows at translation
8220 time", footnote "In this case, using INFINITY will violate the
8221 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8222 Thus we pedwarn to ensure this constraint violation is
8223 diagnosed. */
8224 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8225 pedwarn (loc, 0, "target format does not support infinity");
8227 real_inf (&real);
8228 return build_real (type, real);
8231 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8232 NULL_TREE if no simplification can be made. */
8234 static tree
8235 fold_builtin_sincos (location_t loc,
8236 tree arg0, tree arg1, tree arg2)
8238 tree type;
8239 tree fndecl, call = NULL_TREE;
8241 if (!validate_arg (arg0, REAL_TYPE)
8242 || !validate_arg (arg1, POINTER_TYPE)
8243 || !validate_arg (arg2, POINTER_TYPE))
8244 return NULL_TREE;
8246 type = TREE_TYPE (arg0);
8248 /* Canonicalize sincos to cexpi: look up the cexpi variant for this type. */
8249 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8250 if (fn == END_BUILTINS)
8251 return NULL_TREE;
8253 /* Calculate the result when the argument is a constant. */
8254 if (TREE_CODE (arg0) == REAL_CST)
8256 tree complex_type = build_complex_type (type);
8257 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8259 if (!call)
8261 if (!targetm.libc_has_function (function_c99_math_complex)
8262 || !builtin_decl_implicit_p (fn))
8263 return NULL_TREE;
8264 fndecl = builtin_decl_explicit (fn);
8265 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8266 call = builtin_save_expr (call);
8269 tree ptype = build_pointer_type (type);
8270 arg1 = fold_convert (ptype, arg1);
8271 arg2 = fold_convert (ptype, arg2);
8272 return build2 (COMPOUND_EXPR, void_type_node,
8273 build2 (MODIFY_EXPR, void_type_node,
8274 build_fold_indirect_ref_loc (loc, arg1),
8275 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8276 build2 (MODIFY_EXPR, void_type_node,
8277 build_fold_indirect_ref_loc (loc, arg2),
8278 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8281 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8282 Return NULL_TREE if no simplification can be made. */
8284 static tree
8285 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8287 if (!validate_arg (arg1, POINTER_TYPE)
8288 || !validate_arg (arg2, POINTER_TYPE)
8289 || !validate_arg (len, INTEGER_TYPE))
8290 return NULL_TREE;
8292 /* If the LEN parameter is zero, return zero. */
8293 if (integer_zerop (len))
8294 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8295 arg1, arg2);
8297 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8298 if (operand_equal_p (arg1, arg2, 0))
8299 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8301 /* If the LEN parameter is one, return an expression corresponding to
8302 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8303 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8305 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8306 tree cst_uchar_ptr_node
8307 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8309 tree ind1
8310 = fold_convert_loc (loc, integer_type_node,
8311 build1 (INDIRECT_REF, cst_uchar_node,
8312 fold_convert_loc (loc,
8313 cst_uchar_ptr_node,
8314 arg1)));
8315 tree ind2
8316 = fold_convert_loc (loc, integer_type_node,
8317 build1 (INDIRECT_REF, cst_uchar_node,
8318 fold_convert_loc (loc,
8319 cst_uchar_ptr_node,
8320 arg2)));
8321 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8324 return NULL_TREE;
8327 /* Fold a call to builtin isascii with argument ARG. */
8329 static tree
8330 fold_builtin_isascii (location_t loc, tree arg)
8332 if (!validate_arg (arg, INTEGER_TYPE))
8333 return NULL_TREE;
8334 else
8336 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8337 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8338 build_int_cst (integer_type_node,
8339 ~ (unsigned HOST_WIDE_INT) 0x7f));
8340 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8341 arg, integer_zero_node);
8345 /* Fold a call to builtin toascii with argument ARG. */
8347 static tree
8348 fold_builtin_toascii (location_t loc, tree arg)
8350 if (!validate_arg (arg, INTEGER_TYPE))
8351 return NULL_TREE;
8353 /* Transform toascii(c) -> (c & 0x7f). */
8354 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8355 build_int_cst (integer_type_node, 0x7f));
8358 /* Fold a call to builtin isdigit with argument ARG. */
8360 static tree
8361 fold_builtin_isdigit (location_t loc, tree arg)
8363 if (!validate_arg (arg, INTEGER_TYPE))
8364 return NULL_TREE;
8365 else
8367 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8368 /* According to the C standard, isdigit is unaffected by locale.
8369 However, it definitely is affected by the target character set. */
8370 unsigned HOST_WIDE_INT target_digit0
8371 = lang_hooks.to_target_charset ('0');
8373 if (target_digit0 == 0)
8374 return NULL_TREE;
8376 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8377 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8378 build_int_cst (unsigned_type_node, target_digit0));
8379 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8380 build_int_cst (unsigned_type_node, 9));
8384 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8386 static tree
8387 fold_builtin_fabs (location_t loc, tree arg, tree type)
8389 if (!validate_arg (arg, REAL_TYPE))
8390 return NULL_TREE;
8392 arg = fold_convert_loc (loc, type, arg);
8393 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8396 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8398 static tree
8399 fold_builtin_abs (location_t loc, tree arg, tree type)
8401 if (!validate_arg (arg, INTEGER_TYPE))
8402 return NULL_TREE;
8404 arg = fold_convert_loc (loc, type, arg);
8405 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8408 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8410 static tree
8411 fold_builtin_carg (location_t loc, tree arg, tree type)
8413 if (validate_arg (arg, COMPLEX_TYPE)
8414 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8416 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8418 if (atan2_fn)
8420 tree new_arg = builtin_save_expr (arg);
8421 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8422 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8423 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8427 return NULL_TREE;
8430 /* Fold a call to builtin frexp; we can assume the base is 2. */
8432 static tree
8433 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8435 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8436 return NULL_TREE;
8438 STRIP_NOPS (arg0);
8440 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8441 return NULL_TREE;
8443 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8445 /* Proceed if a valid pointer type was passed in. */
8446 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8448 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8449 tree frac, exp;
8451 switch (value->cl)
8453 case rvc_zero:
8454 /* For +-0, return (*exp = 0, +-0). */
8455 exp = integer_zero_node;
8456 frac = arg0;
8457 break;
8458 case rvc_nan:
8459 case rvc_inf:
8460 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8461 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8462 case rvc_normal:
8464 /* Since the frexp function always expects base 2, and in
8465 GCC normalized significands are already in the range
8466 [0.5, 1.0), we have exactly what frexp wants. */
8467 REAL_VALUE_TYPE frac_rvt = *value;
8468 SET_REAL_EXP (&frac_rvt, 0);
8469 frac = build_real (rettype, frac_rvt);
8470 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8472 break;
8473 default:
8474 gcc_unreachable ();
8477 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8478 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8479 TREE_SIDE_EFFECTS (arg1) = 1;
8480 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8483 return NULL_TREE;
8486 /* Fold a call to builtin modf. */
8488 static tree
8489 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8491 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8492 return NULL_TREE;
8494 STRIP_NOPS (arg0);
8496 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8497 return NULL_TREE;
8499 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8501 /* Proceed if a valid pointer type was passed in. */
8502 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8504 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8505 REAL_VALUE_TYPE trunc, frac;
8507 switch (value->cl)
8509 case rvc_nan:
8510 case rvc_zero:
8511 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8512 trunc = frac = *value;
8513 break;
8514 case rvc_inf:
8515 /* For +-Inf, return (*arg1 = arg0, +-0). */
8516 frac = dconst0;
8517 frac.sign = value->sign;
8518 trunc = *value;
8519 break;
8520 case rvc_normal:
8521 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8522 real_trunc (&trunc, VOIDmode, value);
8523 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8524 /* If the original number was negative and already
8525 integral, then the fractional part is -0.0. */
8526 if (value->sign && frac.cl == rvc_zero)
8527 frac.sign = value->sign;
8528 break;
8531 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8532 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8533 build_real (rettype, trunc));
8534 TREE_SIDE_EFFECTS (arg1) = 1;
8535 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8536 build_real (rettype, frac));
8539 return NULL_TREE;
8542 /* Given a location LOC, an interclass builtin function decl FNDECL
8543 and its single argument ARG, return a folded expression computing
8544 the same, or NULL_TREE if we either couldn't or didn't want to fold
8545 (the latter happens if there's an RTL instruction available). */
8547 static tree
8548 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8550 machine_mode mode;
8552 if (!validate_arg (arg, REAL_TYPE))
8553 return NULL_TREE;
8555 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8556 return NULL_TREE;
8558 mode = TYPE_MODE (TREE_TYPE (arg));
8560 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8562 /* If there is no optab, try generic code. */
8563 switch (DECL_FUNCTION_CODE (fndecl))
8565 tree result;
8567 CASE_FLT_FN (BUILT_IN_ISINF):
8569 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8570 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8571 tree type = TREE_TYPE (arg);
8572 REAL_VALUE_TYPE r;
8573 char buf[128];
8575 if (is_ibm_extended)
8577 /* NaN and Inf are encoded in the high-order double value
8578 only. The low-order value is not significant. */
8579 type = double_type_node;
8580 mode = DFmode;
8581 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8583 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8584 real_from_string (&r, buf);
8585 result = build_call_expr (isgr_fn, 2,
8586 fold_build1_loc (loc, ABS_EXPR, type, arg),
8587 build_real (type, r));
8588 return result;
8590 CASE_FLT_FN (BUILT_IN_FINITE):
8591 case BUILT_IN_ISFINITE:
8593 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8594 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8595 tree type = TREE_TYPE (arg);
8596 REAL_VALUE_TYPE r;
8597 char buf[128];
8599 if (is_ibm_extended)
8601 /* NaN and Inf are encoded in the high-order double value
8602 only. The low-order value is not significant. */
8603 type = double_type_node;
8604 mode = DFmode;
8605 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8607 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8608 real_from_string (&r, buf);
8609 result = build_call_expr (isle_fn, 2,
8610 fold_build1_loc (loc, ABS_EXPR, type, arg),
8611 build_real (type, r));
8612 /*result = fold_build2_loc (loc, UNGT_EXPR,
8613 TREE_TYPE (TREE_TYPE (fndecl)),
8614 fold_build1_loc (loc, ABS_EXPR, type, arg),
8615 build_real (type, r));
8616 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8617 TREE_TYPE (TREE_TYPE (fndecl)),
8618 result);*/
8619 return result;
8621 case BUILT_IN_ISNORMAL:
8623 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8624 islessequal(fabs(x),DBL_MAX). */
8625 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8626 tree type = TREE_TYPE (arg);
8627 tree orig_arg, max_exp, min_exp;
8628 machine_mode orig_mode = mode;
8629 REAL_VALUE_TYPE rmax, rmin;
8630 char buf[128];
8632 orig_arg = arg = builtin_save_expr (arg);
8633 if (is_ibm_extended)
8635 /* Use double to test the normal range of IBM extended
8636 precision. Emin for IBM extended precision is
8637 different to emin for IEEE double, being 53 higher
8638 since the low double exponent is at least 53 lower
8639 than the high double exponent. */
8640 type = double_type_node;
8641 mode = DFmode;
8642 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8644 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8646 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8647 real_from_string (&rmax, buf);
8648 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8649 real_from_string (&rmin, buf);
8650 max_exp = build_real (type, rmax);
8651 min_exp = build_real (type, rmin);
8653 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8654 if (is_ibm_extended)
8656 /* Testing the high end of the range is done just using
8657 the high double, using the same test as isfinite().
8658 For the subnormal end of the range we first test the
8659 high double, then if its magnitude is equal to the
8660 limit of 0x1p-969, we test whether the low double is
8661 non-zero and opposite sign to the high double. */
8662 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8663 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8664 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8665 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8666 arg, min_exp);
8667 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8668 complex_double_type_node, orig_arg);
8669 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8670 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8671 tree zero = build_real (type, dconst0);
8672 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8673 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8674 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8675 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8676 fold_build3 (COND_EXPR,
8677 integer_type_node,
8678 hilt, logt, lolt));
8679 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8680 eq_min, ok_lo);
8681 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8682 gt_min, eq_min);
8684 else
8686 tree const isge_fn
8687 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8688 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8690 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8691 max_exp, min_exp);
8692 return result;
8694 default:
8695 break;
8698 return NULL_TREE;
8701 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8702 ARG is the argument for the call. */
8704 static tree
8705 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8707 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8709 if (!validate_arg (arg, REAL_TYPE))
8710 return NULL_TREE;
8712 switch (builtin_index)
8714 case BUILT_IN_ISINF:
8715 if (!HONOR_INFINITIES (arg))
8716 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8718 return NULL_TREE;
8720 case BUILT_IN_ISINF_SIGN:
8722 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8723 /* In a boolean context, GCC will fold the inner COND_EXPR to
8724 1. So e.g. "if (isinf_sign(x))" would be folded to just
8725 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8726 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8727 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8728 tree tmp = NULL_TREE;
8730 arg = builtin_save_expr (arg);
8732 if (signbit_fn && isinf_fn)
8734 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8735 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8737 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8738 signbit_call, integer_zero_node);
8739 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8740 isinf_call, integer_zero_node);
8742 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8743 integer_minus_one_node, integer_one_node);
8744 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8745 isinf_call, tmp,
8746 integer_zero_node);
8749 return tmp;
8752 case BUILT_IN_ISFINITE:
8753 if (!HONOR_NANS (arg)
8754 && !HONOR_INFINITIES (arg))
8755 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8757 return NULL_TREE;
8759 case BUILT_IN_ISNAN:
8760 if (!HONOR_NANS (arg))
8761 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8764 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8765 if (is_ibm_extended)
8767 /* NaN and Inf are encoded in the high-order double value
8768 only. The low-order value is not significant. */
8769 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8772 arg = builtin_save_expr (arg);
8773 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8775 default:
8776 gcc_unreachable ();
8780 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8781 This builtin will generate code to return the appropriate floating
8782 point classification depending on the value of the floating point
8783 number passed in. The possible return values must be supplied as
8784 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8785 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8786 one floating point argument which is "type generic". */
8788 static tree
8789 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8791 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8792 arg, type, res, tmp;
8793 machine_mode mode;
8794 REAL_VALUE_TYPE r;
8795 char buf[128];
8797 /* Verify the required arguments in the original call. */
8798 if (nargs != 6
8799 || !validate_arg (args[0], INTEGER_TYPE)
8800 || !validate_arg (args[1], INTEGER_TYPE)
8801 || !validate_arg (args[2], INTEGER_TYPE)
8802 || !validate_arg (args[3], INTEGER_TYPE)
8803 || !validate_arg (args[4], INTEGER_TYPE)
8804 || !validate_arg (args[5], REAL_TYPE))
8805 return NULL_TREE;
8807 fp_nan = args[0];
8808 fp_infinite = args[1];
8809 fp_normal = args[2];
8810 fp_subnormal = args[3];
8811 fp_zero = args[4];
8812 arg = args[5];
8813 type = TREE_TYPE (arg);
8814 mode = TYPE_MODE (type);
8815 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8817 /* fpclassify(x) ->
8818 isnan(x) ? FP_NAN :
8819 (fabs(x) == Inf ? FP_INFINITE :
8820 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8821 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8823 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8824 build_real (type, dconst0));
8825 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8826 tmp, fp_zero, fp_subnormal);
8828 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8829 real_from_string (&r, buf);
8830 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8831 arg, build_real (type, r));
8832 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8834 if (HONOR_INFINITIES (mode))
8836 real_inf (&r);
8837 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8838 build_real (type, r));
8839 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8840 fp_infinite, res);
8843 if (HONOR_NANS (mode))
8845 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8846 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8849 return res;
8852 /* Fold a call to an unordered comparison function such as
8853 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8854 being called and ARG0 and ARG1 are the arguments for the call.
8855 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8856 the opposite of the desired result. UNORDERED_CODE is used
8857 for modes that can hold NaNs and ORDERED_CODE is used for
8858 the rest. */
8860 static tree
8861 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8862 enum tree_code unordered_code,
8863 enum tree_code ordered_code)
8865 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8866 enum tree_code code;
8867 tree type0, type1;
8868 enum tree_code code0, code1;
8869 tree cmp_type = NULL_TREE;
8871 type0 = TREE_TYPE (arg0);
8872 type1 = TREE_TYPE (arg1);
8874 code0 = TREE_CODE (type0);
8875 code1 = TREE_CODE (type1);
8877 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8878 /* Choose the wider of two real types. */
8879 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8880 ? type0 : type1;
8881 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8882 cmp_type = type0;
8883 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8884 cmp_type = type1;
8886 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8887 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8889 if (unordered_code == UNORDERED_EXPR)
8891 if (!HONOR_NANS (arg0))
8892 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8893 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8896 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8897 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8898 fold_build2_loc (loc, code, type, arg0, arg1));
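/* A minimal sketch of the inverted-code scheme above (kept under
   #if 0).  The unordered builtins must not raise "invalid" on quiet
   NaN operands, so they are folded to the negation of the opposite
   comparison rather than to the comparison itself.  */
#if 0
static int
greater_no_trap (double x, double y)
{
  /* Folds to !(x UNLE y) when NaNs are honored for this mode, and to
     the plain !(x <= y) otherwise.  */
  return __builtin_isgreater (x, y);
}
#endif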
8901 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8902 arithmetics if it can never overflow, or into internal functions that
8903 return both result of arithmetics and overflowed boolean flag in
8904 a complex integer result, or some other check for overflow.
8905 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8906 checking part of that. */
8908 static tree
8909 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8910 tree arg0, tree arg1, tree arg2)
8912 enum internal_fn ifn = IFN_LAST;
8913 /* The code of the expression corresponding to the type-generic
8914 built-in, or ERROR_MARK for the type-specific ones. */
8915 enum tree_code opcode = ERROR_MARK;
8916 bool ovf_only = false;
8918 switch (fcode)
8920 case BUILT_IN_ADD_OVERFLOW_P:
8921 ovf_only = true;
8922 /* FALLTHRU */
8923 case BUILT_IN_ADD_OVERFLOW:
8924 opcode = PLUS_EXPR;
8925 /* FALLTHRU */
8926 case BUILT_IN_SADD_OVERFLOW:
8927 case BUILT_IN_SADDL_OVERFLOW:
8928 case BUILT_IN_SADDLL_OVERFLOW:
8929 case BUILT_IN_UADD_OVERFLOW:
8930 case BUILT_IN_UADDL_OVERFLOW:
8931 case BUILT_IN_UADDLL_OVERFLOW:
8932 ifn = IFN_ADD_OVERFLOW;
8933 break;
8934 case BUILT_IN_SUB_OVERFLOW_P:
8935 ovf_only = true;
8936 /* FALLTHRU */
8937 case BUILT_IN_SUB_OVERFLOW:
8938 opcode = MINUS_EXPR;
8939 /* FALLTHRU */
8940 case BUILT_IN_SSUB_OVERFLOW:
8941 case BUILT_IN_SSUBL_OVERFLOW:
8942 case BUILT_IN_SSUBLL_OVERFLOW:
8943 case BUILT_IN_USUB_OVERFLOW:
8944 case BUILT_IN_USUBL_OVERFLOW:
8945 case BUILT_IN_USUBLL_OVERFLOW:
8946 ifn = IFN_SUB_OVERFLOW;
8947 break;
8948 case BUILT_IN_MUL_OVERFLOW_P:
8949 ovf_only = true;
8950 /* FALLTHRU */
8951 case BUILT_IN_MUL_OVERFLOW:
8952 opcode = MULT_EXPR;
8953 /* FALLTHRU */
8954 case BUILT_IN_SMUL_OVERFLOW:
8955 case BUILT_IN_SMULL_OVERFLOW:
8956 case BUILT_IN_SMULLL_OVERFLOW:
8957 case BUILT_IN_UMUL_OVERFLOW:
8958 case BUILT_IN_UMULL_OVERFLOW:
8959 case BUILT_IN_UMULLL_OVERFLOW:
8960 ifn = IFN_MUL_OVERFLOW;
8961 break;
8962 default:
8963 gcc_unreachable ();
8966 /* For the "generic" overloads, the first two arguments can have different
8967 types and the last argument determines the target type to use to check
8968 for overflow. The arguments of the other overloads all have the same
8969 type. */
8970 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8972 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8973 arguments are constant, attempt to fold the built-in call into a constant
8974 expression indicating whether or not it detected an overflow. */
8975 if (ovf_only
8976 && TREE_CODE (arg0) == INTEGER_CST
8977 && TREE_CODE (arg1) == INTEGER_CST)
8978 /* Perform the computation in the target type and check for overflow. */
8979 return omit_one_operand_loc (loc, boolean_type_node,
8980 arith_overflowed_p (opcode, type, arg0, arg1)
8981 ? boolean_true_node : boolean_false_node,
8982 arg2);
8984 tree ctype = build_complex_type (type);
8985 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8986 2, arg0, arg1);
8987 tree tgt = save_expr (call);
8988 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8989 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8990 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8992 if (ovf_only)
8993 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8995 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8996 tree store
8997 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8998 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
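/* A minimal usage sketch of the overflow folding above (kept under
   #if 0).  The first call becomes REALPART_EXPR/IMAGPART_EXPR of an
   IFN_ADD_OVERFLOW result plus a store through RES; the second keeps
   only the overflow flag, with the (int) 0 argument used solely to
   select the target type.  */
#if 0
static int
checked_add (long a, long b, int *res)
{
  return __builtin_add_overflow (a, b, res);
}

static int
add_would_overflow (long a, long b)
{
  return __builtin_add_overflow_p (a, b, (int) 0);
}
#endif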
9001 /* Fold a call to __builtin_FILE to a constant string. */
9003 static inline tree
9004 fold_builtin_FILE (location_t loc)
9006 if (const char *fname = LOCATION_FILE (loc))
9008 /* The documentation says this builtin is equivalent to the preprocessor
9009 __FILE__ macro so it appears appropriate to use the same file prefix
9010 mappings. */
9011 fname = remap_macro_filename (fname);
9012 return build_string_literal (strlen (fname) + 1, fname);
9015 return build_string_literal (1, "");
9018 /* Fold a call to __builtin_FUNCTION to a constant string. */
9020 static inline tree
9021 fold_builtin_FUNCTION ()
9023 const char *name = "";
9025 if (current_function_decl)
9026 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9028 return build_string_literal (strlen (name) + 1, name);
9031 /* Fold a call to __builtin_LINE to an integer constant. */
9033 static inline tree
9034 fold_builtin_LINE (location_t loc, tree type)
9036 return build_int_cst (type, LOCATION_LINE (loc));
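/* A minimal usage sketch for the three builtins above (kept under
   #if 0).  Each call folds to a constant for the location of the
   call itself.  */
#if 0
static void
log_here (void)
{
  const char *file = __builtin_FILE ();	     /* Constant string.  */
  const char *func = __builtin_FUNCTION (); /* "log_here" here.  */
  int line = __builtin_LINE ();		     /* Integer constant.  */
  (void) file; (void) func; (void) line;
}
#endif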
9039 /* Fold a call to built-in function FNDECL with 0 arguments.
9040 This function returns NULL_TREE if no simplification was possible. */
9042 static tree
9043 fold_builtin_0 (location_t loc, tree fndecl)
9045 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9046 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9047 switch (fcode)
9049 case BUILT_IN_FILE:
9050 return fold_builtin_FILE (loc);
9052 case BUILT_IN_FUNCTION:
9053 return fold_builtin_FUNCTION ();
9055 case BUILT_IN_LINE:
9056 return fold_builtin_LINE (loc, type);
9058 CASE_FLT_FN (BUILT_IN_INF):
9059 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9060 case BUILT_IN_INFD32:
9061 case BUILT_IN_INFD64:
9062 case BUILT_IN_INFD128:
9063 return fold_builtin_inf (loc, type, true);
9065 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9066 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9067 return fold_builtin_inf (loc, type, false);
9069 case BUILT_IN_CLASSIFY_TYPE:
9070 return fold_builtin_classify_type (NULL_TREE);
9072 default:
9073 break;
9075 return NULL_TREE;
9078 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9079 This function returns NULL_TREE if no simplification was possible. */
9081 static tree
9082 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9084 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9085 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9087 if (TREE_CODE (arg0) == ERROR_MARK)
9088 return NULL_TREE;
9090 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9091 return ret;
9093 switch (fcode)
9095 case BUILT_IN_CONSTANT_P:
9097 tree val = fold_builtin_constant_p (arg0);
9099 /* Gimplification will pull the CALL_EXPR for the builtin out of
9100 an if condition. When not optimizing, we'll not CSE it back.
9101 	 To avoid regressions such as link errors, return false now.  */
9102 if (!val && !optimize)
9103 val = integer_zero_node;
9105 return val;
9108 case BUILT_IN_CLASSIFY_TYPE:
9109 return fold_builtin_classify_type (arg0);
9111 case BUILT_IN_STRLEN:
9112 return fold_builtin_strlen (loc, type, arg0);
9114 CASE_FLT_FN (BUILT_IN_FABS):
9115 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9116 case BUILT_IN_FABSD32:
9117 case BUILT_IN_FABSD64:
9118 case BUILT_IN_FABSD128:
9119 return fold_builtin_fabs (loc, arg0, type);
9121 case BUILT_IN_ABS:
9122 case BUILT_IN_LABS:
9123 case BUILT_IN_LLABS:
9124 case BUILT_IN_IMAXABS:
9125 return fold_builtin_abs (loc, arg0, type);
9127 CASE_FLT_FN (BUILT_IN_CONJ):
9128 if (validate_arg (arg0, COMPLEX_TYPE)
9129 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9130 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9131 break;
9133 CASE_FLT_FN (BUILT_IN_CREAL):
9134 if (validate_arg (arg0, COMPLEX_TYPE)
9135 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9136 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9137 break;
9139 CASE_FLT_FN (BUILT_IN_CIMAG):
9140 if (validate_arg (arg0, COMPLEX_TYPE)
9141 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9142 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9143 break;
9145 CASE_FLT_FN (BUILT_IN_CARG):
9146 return fold_builtin_carg (loc, arg0, type);
9148 case BUILT_IN_ISASCII:
9149 return fold_builtin_isascii (loc, arg0);
9151 case BUILT_IN_TOASCII:
9152 return fold_builtin_toascii (loc, arg0);
9154 case BUILT_IN_ISDIGIT:
9155 return fold_builtin_isdigit (loc, arg0);
9157 CASE_FLT_FN (BUILT_IN_FINITE):
9158 case BUILT_IN_FINITED32:
9159 case BUILT_IN_FINITED64:
9160 case BUILT_IN_FINITED128:
9161 case BUILT_IN_ISFINITE:
9163 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9164 if (ret)
9165 return ret;
9166 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9169 CASE_FLT_FN (BUILT_IN_ISINF):
9170 case BUILT_IN_ISINFD32:
9171 case BUILT_IN_ISINFD64:
9172 case BUILT_IN_ISINFD128:
9174 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9175 if (ret)
9176 return ret;
9177 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9180 case BUILT_IN_ISNORMAL:
9181 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9183 case BUILT_IN_ISINF_SIGN:
9184 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9186 CASE_FLT_FN (BUILT_IN_ISNAN):
9187 case BUILT_IN_ISNAND32:
9188 case BUILT_IN_ISNAND64:
9189 case BUILT_IN_ISNAND128:
9190 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9192 case BUILT_IN_FREE:
9193 if (integer_zerop (arg0))
9194 return build_empty_stmt (loc);
9195 break;
9197 default:
9198 break;
9201 return NULL_TREE;
9205 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9206 This function returns NULL_TREE if no simplification was possible. */
9208 static tree
9209 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9211 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9212 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9214 if (TREE_CODE (arg0) == ERROR_MARK
9215 || TREE_CODE (arg1) == ERROR_MARK)
9216 return NULL_TREE;
9218 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9219 return ret;
9221 switch (fcode)
9223 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9224 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9225 if (validate_arg (arg0, REAL_TYPE)
9226 && validate_arg (arg1, POINTER_TYPE))
9227 return do_mpfr_lgamma_r (arg0, arg1, type);
9228 break;
9230 CASE_FLT_FN (BUILT_IN_FREXP):
9231 return fold_builtin_frexp (loc, arg0, arg1, type);
9233 CASE_FLT_FN (BUILT_IN_MODF):
9234 return fold_builtin_modf (loc, arg0, arg1, type);
9236 case BUILT_IN_STRSPN:
9237 return fold_builtin_strspn (loc, arg0, arg1);
9239 case BUILT_IN_STRCSPN:
9240 return fold_builtin_strcspn (loc, arg0, arg1);
9242 case BUILT_IN_STRPBRK:
9243 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9245 case BUILT_IN_EXPECT:
9246 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9248 case BUILT_IN_ISGREATER:
9249 return fold_builtin_unordered_cmp (loc, fndecl,
9250 arg0, arg1, UNLE_EXPR, LE_EXPR);
9251 case BUILT_IN_ISGREATEREQUAL:
9252 return fold_builtin_unordered_cmp (loc, fndecl,
9253 arg0, arg1, UNLT_EXPR, LT_EXPR);
9254 case BUILT_IN_ISLESS:
9255 return fold_builtin_unordered_cmp (loc, fndecl,
9256 arg0, arg1, UNGE_EXPR, GE_EXPR);
9257 case BUILT_IN_ISLESSEQUAL:
9258 return fold_builtin_unordered_cmp (loc, fndecl,
9259 arg0, arg1, UNGT_EXPR, GT_EXPR);
9260 case BUILT_IN_ISLESSGREATER:
9261 return fold_builtin_unordered_cmp (loc, fndecl,
9262 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9263 case BUILT_IN_ISUNORDERED:
9264 return fold_builtin_unordered_cmp (loc, fndecl,
9265 arg0, arg1, UNORDERED_EXPR,
9266 NOP_EXPR);
9268 /* We do the folding for va_start in the expander. */
9269 case BUILT_IN_VA_START:
9270 break;
9272 case BUILT_IN_OBJECT_SIZE:
9273 return fold_builtin_object_size (arg0, arg1);
9275 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9276 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9278 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9279 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9281 default:
9282 break;
9284 return NULL_TREE;
9287 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9288 and ARG2.
9289 This function returns NULL_TREE if no simplification was possible. */
9291 static tree
9292 fold_builtin_3 (location_t loc, tree fndecl,
9293 tree arg0, tree arg1, tree arg2)
9295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9298 if (TREE_CODE (arg0) == ERROR_MARK
9299 || TREE_CODE (arg1) == ERROR_MARK
9300 || TREE_CODE (arg2) == ERROR_MARK)
9301 return NULL_TREE;
9303 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9304 arg0, arg1, arg2))
9305 return ret;
9307 switch (fcode)
9310 CASE_FLT_FN (BUILT_IN_SINCOS):
9311 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9313 CASE_FLT_FN (BUILT_IN_REMQUO):
9314 if (validate_arg (arg0, REAL_TYPE)
9315 && validate_arg (arg1, REAL_TYPE)
9316 && validate_arg (arg2, POINTER_TYPE))
9317 return do_mpfr_remquo (arg0, arg1, arg2);
9318 break;
9320 case BUILT_IN_MEMCMP:
9321 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9323 case BUILT_IN_EXPECT:
9324 return fold_builtin_expect (loc, arg0, arg1, arg2);
9326 case BUILT_IN_ADD_OVERFLOW:
9327 case BUILT_IN_SUB_OVERFLOW:
9328 case BUILT_IN_MUL_OVERFLOW:
9329 case BUILT_IN_ADD_OVERFLOW_P:
9330 case BUILT_IN_SUB_OVERFLOW_P:
9331 case BUILT_IN_MUL_OVERFLOW_P:
9332 case BUILT_IN_SADD_OVERFLOW:
9333 case BUILT_IN_SADDL_OVERFLOW:
9334 case BUILT_IN_SADDLL_OVERFLOW:
9335 case BUILT_IN_SSUB_OVERFLOW:
9336 case BUILT_IN_SSUBL_OVERFLOW:
9337 case BUILT_IN_SSUBLL_OVERFLOW:
9338 case BUILT_IN_SMUL_OVERFLOW:
9339 case BUILT_IN_SMULL_OVERFLOW:
9340 case BUILT_IN_SMULLL_OVERFLOW:
9341 case BUILT_IN_UADD_OVERFLOW:
9342 case BUILT_IN_UADDL_OVERFLOW:
9343 case BUILT_IN_UADDLL_OVERFLOW:
9344 case BUILT_IN_USUB_OVERFLOW:
9345 case BUILT_IN_USUBL_OVERFLOW:
9346 case BUILT_IN_USUBLL_OVERFLOW:
9347 case BUILT_IN_UMUL_OVERFLOW:
9348 case BUILT_IN_UMULL_OVERFLOW:
9349 case BUILT_IN_UMULLL_OVERFLOW:
9350 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9352 default:
9353 break;
9355 return NULL_TREE;
9358 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9359 arguments. IGNORE is true if the result of the
9360 function call is ignored. This function returns NULL_TREE if no
9361 simplification was possible. */
9363 tree
9364 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9366 tree ret = NULL_TREE;
9368 switch (nargs)
9370 case 0:
9371 ret = fold_builtin_0 (loc, fndecl);
9372 break;
9373 case 1:
9374 ret = fold_builtin_1 (loc, fndecl, args[0]);
9375 break;
9376 case 2:
9377 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9378 break;
9379 case 3:
9380 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9381 break;
9382 default:
9383 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9384 break;
9386 if (ret)
9388 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9389 SET_EXPR_LOCATION (ret, loc);
9390 TREE_NO_WARNING (ret) = 1;
9391 return ret;
9393 return NULL_TREE;
9396 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9397 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9398 of arguments in ARGS to be omitted. OLDNARGS is the number of
9399 elements in ARGS. */
9401 static tree
9402 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9403 int skip, tree fndecl, int n, va_list newargs)
9405 int nargs = oldnargs - skip + n;
9406 tree *buffer;
9408 if (n > 0)
9410 int i, j;
9412 buffer = XALLOCAVEC (tree, nargs);
9413 for (i = 0; i < n; i++)
9414 buffer[i] = va_arg (newargs, tree);
9415 for (j = skip; j < oldnargs; j++, i++)
9416 buffer[i] = args[j];
9418 else
9419 buffer = args + skip;
9421 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9424 /* Return true if FNDECL shouldn't be folded right now.
9425 If a built-in function has an inline attribute always_inline
9426 wrapper, defer folding it after always_inline functions have
9427 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9428 might not be performed. */
9430 bool
9431 avoid_folding_inline_builtin (tree fndecl)
9433 return (DECL_DECLARED_INLINE_P (fndecl)
9434 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9435 && cfun
9436 && !cfun->always_inline_functions_inlined
9437 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
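/* A sketch of the kind of wrapper this function guards against, in
   the style of glibc's _FORTIFY_SOURCE headers (kept under #if 0).
   Folding the memcpy call before this always_inline wrapper is
   inlined would bypass the __memcpy_chk object-size check.  */
#if 0
extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) void *
memcpy (void *dest, const void *src, __SIZE_TYPE__ len)
{
  return __builtin___memcpy_chk (dest, src, len,
				 __builtin_object_size (dest, 0));
}
#endif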
9440 /* A wrapper function for builtin folding that prevents warnings for
9441 "statement without effect" and the like, caused by removing the
9442 call node earlier than the warning is generated. */
9444 tree
9445 fold_call_expr (location_t loc, tree exp, bool ignore)
9447 tree ret = NULL_TREE;
9448 tree fndecl = get_callee_fndecl (exp);
9449 if (fndecl
9450 && TREE_CODE (fndecl) == FUNCTION_DECL
9451 && DECL_BUILT_IN (fndecl)
9452 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9453 yet. Defer folding until we see all the arguments
9454 (after inlining). */
9455 && !CALL_EXPR_VA_ARG_PACK (exp))
9457 int nargs = call_expr_nargs (exp);
9459 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9460 instead last argument is __builtin_va_arg_pack (). Defer folding
9461 even in that case, until arguments are finalized. */
9462 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9464 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9465 if (fndecl2
9466 && TREE_CODE (fndecl2) == FUNCTION_DECL
9467 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9468 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9469 return NULL_TREE;
9472 if (avoid_folding_inline_builtin (fndecl))
9473 return NULL_TREE;
9475 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9476 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9477 CALL_EXPR_ARGP (exp), ignore);
9478 else
9480 tree *args = CALL_EXPR_ARGP (exp);
9481 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9482 if (ret)
9483 return ret;
9486 return NULL_TREE;
9489 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9490 N arguments are passed in the array ARGARRAY. Return a folded
9491 expression or NULL_TREE if no simplification was possible. */
9493 tree
9494 fold_builtin_call_array (location_t loc, tree,
9495 tree fn,
9496 int n,
9497 tree *argarray)
9499 if (TREE_CODE (fn) != ADDR_EXPR)
9500 return NULL_TREE;
9502 tree fndecl = TREE_OPERAND (fn, 0);
9503 if (TREE_CODE (fndecl) == FUNCTION_DECL
9504 && DECL_BUILT_IN (fndecl))
9506 /* If last argument is __builtin_va_arg_pack (), arguments to this
9507 function are not finalized yet. Defer folding until they are. */
9508 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9510 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9511 if (fndecl2
9512 && TREE_CODE (fndecl2) == FUNCTION_DECL
9513 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9514 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9515 return NULL_TREE;
9517 if (avoid_folding_inline_builtin (fndecl))
9518 return NULL_TREE;
9519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9520 return targetm.fold_builtin (fndecl, n, argarray, false);
9521 else
9522 return fold_builtin_n (loc, fndecl, argarray, n, false);
9525 return NULL_TREE;
9528 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9529 along with N new arguments specified as the "..." parameters. SKIP
9530 is the number of arguments in EXP to be omitted. This function is used
9531 to do varargs-to-varargs transformations. */
9533 static tree
9534 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9536 va_list ap;
9537 tree t;
9539 va_start (ap, n);
9540 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9541 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9542 va_end (ap);
9544 return t;
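/* An illustrative call (hypothetical STRCPY_DECL): the classic
   varargs-to-varargs transform sprintf (dest, "%s", str)
   -> strcpy (dest, str).  With EXP the sprintf CALL_EXPR, skip its
   first two arguments and prepend DEST as the one new argument:

     rewrite_call_expr (loc, exp, 2, strcpy_decl, 1, dest);

   so the buffer handed to build_call_expr_loc_array is
   { dest, str }.  */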
9547 /* Validate a single argument ARG against a tree code CODE representing
9548 a type. Return true when argument is valid. */
9550 static bool
9551 validate_arg (const_tree arg, enum tree_code code)
9553 if (!arg)
9554 return false;
9555 else if (code == POINTER_TYPE)
9556 return POINTER_TYPE_P (TREE_TYPE (arg));
9557 else if (code == INTEGER_TYPE)
9558 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9559 return code == TREE_CODE (TREE_TYPE (arg));
9562 /* This function validates the types of a function call argument list
9563 against a specified list of tree_codes. If the last specifier is a 0,
9564    that represents an ellipsis; otherwise the last specifier must be a
9565 VOID_TYPE.
9567 This is the GIMPLE version of validate_arglist. Eventually we want to
9568 completely convert builtins.c to work from GIMPLEs and the tree based
9569 validate_arglist will then be removed. */
9571 bool
9572 validate_gimple_arglist (const gcall *call, ...)
9574 enum tree_code code;
9575   bool res = false;
9576 va_list ap;
9577 const_tree arg;
9578 size_t i;
9580 va_start (ap, call);
9581 i = 0;
9585 code = (enum tree_code) va_arg (ap, int);
9586 switch (code)
9588 case 0:
9589 	/* This signifies an ellipsis; any further arguments are all ok.  */
9590 res = true;
9591 goto end;
9592 case VOID_TYPE:
9593 	/* This signifies an endlink; if no arguments remain, return
9594 	   true, otherwise return false.  */
9595 res = (i == gimple_call_num_args (call));
9596 goto end;
9597 default:
9598 /* If no parameters remain or the parameter's code does not
9599 match the specified code, return false. Otherwise continue
9600 checking any remaining arguments. */
9601 arg = gimple_call_arg (call, i++);
9602 if (!validate_arg (arg, code))
9603 goto end;
9604 break;
9607 while (1);
9609 /* We need gotos here since we can only have one VA_CLOSE in a
9610 function. */
9611 end: ;
9612 va_end (ap);
9614 return res;
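/* An illustrative call: checking a sincos-style signature, where the
   trailing VOID_TYPE terminates the expected argument list:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
				   POINTER_TYPE, VOID_TYPE))
       return false;

   Passing 0 as the last specifier instead would accept any further
   arguments, as for a printf-like ellipsis.  */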
9617 /* Default target-specific builtin expander that does nothing. */
9619 rtx
9620 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9621 rtx target ATTRIBUTE_UNUSED,
9622 rtx subtarget ATTRIBUTE_UNUSED,
9623 machine_mode mode ATTRIBUTE_UNUSED,
9624 int ignore ATTRIBUTE_UNUSED)
9626 return NULL_RTX;
9629 /* Returns true if EXP represents data that would potentially reside
9630 in a readonly section. */
9632 bool
9633 readonly_data_expr (tree exp)
9635 STRIP_NOPS (exp);
9637 if (TREE_CODE (exp) != ADDR_EXPR)
9638 return false;
9640 exp = get_base_address (TREE_OPERAND (exp, 0));
9641 if (!exp)
9642 return false;
9644 /* Make sure we call decl_readonly_section only for trees it
9645 can handle (since it returns true for everything it doesn't
9646 understand). */
9647 if (TREE_CODE (exp) == STRING_CST
9648 || TREE_CODE (exp) == CONSTRUCTOR
9649 || (VAR_P (exp) && TREE_STATIC (exp)))
9650 return decl_readonly_section (exp, 0);
9651 else
9652 return false;
9655 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9656 to the call, and TYPE is its return type.
9658 Return NULL_TREE if no simplification was possible, otherwise return the
9659 simplified form of the call as a tree.
9661 The simplified form may be a constant or other expression which
9662 computes the same value, but in a more efficient manner (including
9663 calls to other builtin functions).
9665 The call may contain arguments which need to be evaluated, but
9666 which are not useful to determine the result of the call. In
9667 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9668 COMPOUND_EXPR will be an argument which must be evaluated.
9669 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9670 COMPOUND_EXPR in the chain will contain the tree for the simplified
9671 form of the builtin function call. */
9673 static tree
9674 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9676 if (!validate_arg (s1, POINTER_TYPE)
9677 || !validate_arg (s2, POINTER_TYPE))
9678 return NULL_TREE;
9679 else
9681 tree fn;
9682 const char *p1, *p2;
9684 p2 = c_getstr (s2);
9685 if (p2 == NULL)
9686 return NULL_TREE;
9688 p1 = c_getstr (s1);
9689 if (p1 != NULL)
9691 const char *r = strpbrk (p1, p2);
9692 tree tem;
9694 if (r == NULL)
9695 return build_int_cst (TREE_TYPE (s1), 0);
9697 /* Return an offset into the constant string argument. */
9698 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9699 return fold_convert_loc (loc, type, tem);
9702 if (p2[0] == '\0')
9703 /* strpbrk(x, "") == NULL.
9704 Evaluate and ignore s1 in case it had side-effects. */
9705 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9707 if (p2[1] != '\0')
9708 return NULL_TREE; /* Really call strpbrk. */
9710 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9711 if (!fn)
9712 return NULL_TREE;
9714 /* New argument list transforming strpbrk(s1, s2) to
9715 strchr(s1, s2[0]). */
9716 return build_call_expr_loc (loc, fn, 2, s1,
9717 build_int_cst (integer_type_node, p2[0]));
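/* A minimal sketch of the three strpbrk cases above (kept under
   #if 0).  */
#if 0
#include <string.h>

static void
strpbrk_cases (const char *s)
{
  char *a = strpbrk ("hello", "lo"); /* Both constant: folds to the
					offset "hello" + 2.  */
  char *b = strpbrk (s, "");	     /* Folds to NULL; S is still
					evaluated for side-effects.  */
  char *c = strpbrk (s, "/");	     /* One-character set: becomes
					strchr (s, '/').  */
  (void) a; (void) b; (void) c;
}
#endif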
9721 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9722 to the call.
9724 Return NULL_TREE if no simplification was possible, otherwise return the
9725 simplified form of the call as a tree.
9727 The simplified form may be a constant or other expression which
9728 computes the same value, but in a more efficient manner (including
9729 calls to other builtin functions).
9731 The call may contain arguments which need to be evaluated, but
9732 which are not useful to determine the result of the call. In
9733 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9734 COMPOUND_EXPR will be an argument which must be evaluated.
9735 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9736 COMPOUND_EXPR in the chain will contain the tree for the simplified
9737 form of the builtin function call. */
9739 static tree
9740 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9742 if (!validate_arg (s1, POINTER_TYPE)
9743 || !validate_arg (s2, POINTER_TYPE))
9744 return NULL_TREE;
9745 else
9747 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9749       /* If either argument is "", the result is 0.  */
9750 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9751 /* Evaluate and ignore both arguments in case either one has
9752 side-effects. */
9753 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9754 s1, s2);
9755 return NULL_TREE;
9759 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9760 to the call.
9762 Return NULL_TREE if no simplification was possible, otherwise return the
9763 simplified form of the call as a tree.
9765 The simplified form may be a constant or other expression which
9766 computes the same value, but in a more efficient manner (including
9767 calls to other builtin functions).
9769 The call may contain arguments which need to be evaluated, but
9770 which are not useful to determine the result of the call. In
9771 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9772 COMPOUND_EXPR will be an argument which must be evaluated.
9773 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9774 COMPOUND_EXPR in the chain will contain the tree for the simplified
9775 form of the builtin function call. */
9777 static tree
9778 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9780 if (!validate_arg (s1, POINTER_TYPE)
9781 || !validate_arg (s2, POINTER_TYPE))
9782 return NULL_TREE;
9783 else
9785       /* If the first argument is "", the result is 0.  */
9786 const char *p1 = c_getstr (s1);
9787 if (p1 && *p1 == '\0')
9789 /* Evaluate and ignore argument s2 in case it has
9790 side-effects. */
9791 return omit_one_operand_loc (loc, size_type_node,
9792 size_zero_node, s2);
9795 /* If the second argument is "", return __builtin_strlen(s1). */
9796 const char *p2 = c_getstr (s2);
9797 if (p2 && *p2 == '\0')
9799 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9801 /* If the replacement _DECL isn't initialized, don't do the
9802 transformation. */
9803 if (!fn)
9804 return NULL_TREE;
9806 return build_call_expr_loc (loc, fn, 1, s1);
9808 return NULL_TREE;
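/* A minimal sketch of the two strcspn shortcuts above (kept under
   #if 0).  */
#if 0
#include <string.h>

static void
strcspn_cases (const char *s, const char *reject)
{
  size_t a = strcspn ("", reject); /* Folds to 0; REJECT is still
				      evaluated for side-effects.  */
  size_t b = strcspn (s, "");	   /* Becomes strlen (s).  */
  (void) a; (void) b;
}
#endif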
9812 /* Fold the next_arg or va_start call EXP.  Return true if an error
9813    was produced, false otherwise.  This is done so that we don't output the error
9814 or warning twice or three times. */
9816 bool
9817 fold_builtin_next_arg (tree exp, bool va_start_p)
9819 tree fntype = TREE_TYPE (current_function_decl);
9820 int nargs = call_expr_nargs (exp);
9821 tree arg;
9822   /* There is a good chance the current input_location points inside the
9823      definition of the va_start macro (perhaps on the token for
9824      the builtin) in a system header, so warnings will not be emitted.
9825 Use the location in real source code. */
9826 source_location current_location =
9827 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9828 NULL);
9830 if (!stdarg_p (fntype))
9832 error ("%<va_start%> used in function with fixed args");
9833 return true;
9836 if (va_start_p)
9838       if (nargs != 2)
9840 error ("wrong number of arguments to function %<va_start%>");
9841 return true;
9843 arg = CALL_EXPR_ARG (exp, 1);
9845 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9846 when we checked the arguments and if needed issued a warning. */
9847 else
9849 if (nargs == 0)
9851 /* Evidently an out of date version of <stdarg.h>; can't validate
9852 va_start's second argument, but can still work as intended. */
9853 warning_at (current_location,
9854 OPT_Wvarargs,
9855 "%<__builtin_next_arg%> called without an argument");
9856 return true;
9858 else if (nargs > 1)
9860 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9861 return true;
9863 arg = CALL_EXPR_ARG (exp, 0);
9866 if (TREE_CODE (arg) == SSA_NAME)
9867 arg = SSA_NAME_VAR (arg);
9869 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9870 or __builtin_next_arg (0) the first time we see it, after checking
9871 the arguments and if needed issuing a warning. */
9872 if (!integer_zerop (arg))
9874 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9876 /* Strip off all nops for the sake of the comparison. This
9877 is not quite the same as STRIP_NOPS. It does more.
9878 We must also strip off INDIRECT_EXPR for C++ reference
9879 parameters. */
9880 while (CONVERT_EXPR_P (arg)
9881 || TREE_CODE (arg) == INDIRECT_REF)
9882 arg = TREE_OPERAND (arg, 0);
9883 if (arg != last_parm)
9885 	  /* FIXME: Sometimes the tree optimizers hand us something other
9886 	     than the last argument even though the user did use the last
9887 	     one.  We just warn here rather than erroring out, so wrong
9888 	     code may be generated because of it.  */
9890 warning_at (current_location,
9891 OPT_Wvarargs,
9892 "second parameter of %<va_start%> not last named argument");
9895 /* Undefined by C99 7.15.1.4p4 (va_start):
9896 "If the parameter parmN is declared with the register storage
9897 class, with a function or array type, or with a type that is
9898 not compatible with the type that results after application of
9899 	 the default argument promotions, the behavior is undefined."  */
9901 else if (DECL_REGISTER (arg))
9903 warning_at (current_location,
9904 OPT_Wvarargs,
9905 "undefined behavior when second parameter of "
9906 "%<va_start%> is declared with %<register%> storage");
9909 /* We want to verify the second parameter just once before the tree
9910 optimizers are run and then avoid keeping it in the tree,
9911 as otherwise we could warn even for correct code like:
9912 void foo (int i, ...)
9913 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9914 if (va_start_p)
9915 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9916 else
9917 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9919 return false;
9923 /* Expand a call EXP to __builtin_object_size. */
9925 static rtx
9926 expand_builtin_object_size (tree exp)
9928 tree ost;
9929 int object_size_type;
9930 tree fndecl = get_callee_fndecl (exp);
9932 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9934 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9935 exp, fndecl);
9936 expand_builtin_trap ();
9937 return const0_rtx;
9940 ost = CALL_EXPR_ARG (exp, 1);
9941 STRIP_NOPS (ost);
9943 if (TREE_CODE (ost) != INTEGER_CST
9944 || tree_int_cst_sgn (ost) < 0
9945 || compare_tree_int (ost, 3) > 0)
9947 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9948 exp, fndecl);
9949 expand_builtin_trap ();
9950 return const0_rtx;
9953 object_size_type = tree_to_shwi (ost);
9955 return object_size_type < 2 ? constm1_rtx : const0_rtx;
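/* A minimal sketch of what reaches this expander (kept under #if 0).
   Earlier passes fold every call they can, so only the "unknown"
   answers remain here: (size_t) -1 for types 0 and 1 (maximum
   estimates) and 0 for types 2 and 3 (minimum estimates).  */
#if 0
static __SIZE_TYPE__
remaining_size (void *p)
{
  /* Nothing is known about P's pointee, so this expands to
     (size_t) -1.  */
  return __builtin_object_size (p, 0);
}
#endif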
9958 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9959 FCODE is the BUILT_IN_* to use.
9960 Return NULL_RTX if we failed; the caller should emit a normal call,
9961 otherwise try to get the result in TARGET, if convenient (and in
9962 mode MODE if that's convenient). */
9964 static rtx
9965 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9966 enum built_in_function fcode)
9968 if (!validate_arglist (exp,
9969 POINTER_TYPE,
9970 fcode == BUILT_IN_MEMSET_CHK
9971 ? INTEGER_TYPE : POINTER_TYPE,
9972 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9973 return NULL_RTX;
9975 tree dest = CALL_EXPR_ARG (exp, 0);
9976 tree src = CALL_EXPR_ARG (exp, 1);
9977 tree len = CALL_EXPR_ARG (exp, 2);
9978 tree size = CALL_EXPR_ARG (exp, 3);
9980 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9981 /*str=*/NULL_TREE, size);
9983 if (!tree_fits_uhwi_p (size))
9984 return NULL_RTX;
9986 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9988 /* Avoid transforming the checking call to an ordinary one when
9989 an overflow has been detected or when the call couldn't be
9990 validated because the size is not constant. */
9991 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9992 return NULL_RTX;
9994 tree fn = NULL_TREE;
9995 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9996 mem{cpy,pcpy,move,set} is available. */
9997 switch (fcode)
9999 case BUILT_IN_MEMCPY_CHK:
10000 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10001 break;
10002 case BUILT_IN_MEMPCPY_CHK:
10003 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10004 break;
10005 case BUILT_IN_MEMMOVE_CHK:
10006 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10007 break;
10008 case BUILT_IN_MEMSET_CHK:
10009 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10010 break;
10011 default:
10012 break;
10015 if (! fn)
10016 return NULL_RTX;
10018 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10019 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10020 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10021 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10023 else if (fcode == BUILT_IN_MEMSET_CHK)
10024 return NULL_RTX;
10025 else
10027 unsigned int dest_align = get_pointer_alignment (dest);
10029 /* If DEST is not a pointer type, call the normal function. */
10030 if (dest_align == 0)
10031 return NULL_RTX;
10033 /* If SRC and DEST are the same (and not volatile), do nothing. */
10034 if (operand_equal_p (src, dest, 0))
10036 tree expr;
10038 if (fcode != BUILT_IN_MEMPCPY_CHK)
10040 /* Evaluate and ignore LEN in case it has side-effects. */
10041 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10042 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10045 expr = fold_build_pointer_plus (dest, len);
10046 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10049 /* __memmove_chk special case. */
10050 if (fcode == BUILT_IN_MEMMOVE_CHK)
10052 unsigned int src_align = get_pointer_alignment (src);
10054 if (src_align == 0)
10055 return NULL_RTX;
10057 /* If src is categorized for a readonly section we can use
10058 normal __memcpy_chk. */
10059 if (readonly_data_expr (src))
10061 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10062 if (!fn)
10063 return NULL_RTX;
10064 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10065 dest, src, len, size);
10066 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10067 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10068 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10071 return NULL_RTX;
10075 /* Emit warning if a buffer overflow is detected at compile time. */
10077 static void
10078 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10080 /* The source string. */
10081 tree srcstr = NULL_TREE;
10082 /* The size of the destination object. */
10083 tree objsize = NULL_TREE;
10084   /* The string being appended to (as in __strcat_chk), or null if
10085      the call is not a concatenation.  */
10086 tree catstr = NULL_TREE;
10087 /* The maximum length of the source sequence in a bounded operation
10088 (such as __strncat_chk) or null if the operation isn't bounded
10089 (such as __strcat_chk). */
10090 tree maxread = NULL_TREE;
10091 /* The exact size of the access (such as in __strncpy_chk). */
10092 tree size = NULL_TREE;
10094 switch (fcode)
10096 case BUILT_IN_STRCPY_CHK:
10097 case BUILT_IN_STPCPY_CHK:
10098 srcstr = CALL_EXPR_ARG (exp, 1);
10099 objsize = CALL_EXPR_ARG (exp, 2);
10100 break;
10102 case BUILT_IN_STRCAT_CHK:
10103 /* For __strcat_chk the warning will be emitted only if overflowing
10104 by at least strlen (dest) + 1 bytes. */
10105 catstr = CALL_EXPR_ARG (exp, 0);
10106 srcstr = CALL_EXPR_ARG (exp, 1);
10107 objsize = CALL_EXPR_ARG (exp, 2);
10108 break;
10110 case BUILT_IN_STRNCAT_CHK:
10111 catstr = CALL_EXPR_ARG (exp, 0);
10112 srcstr = CALL_EXPR_ARG (exp, 1);
10113 maxread = CALL_EXPR_ARG (exp, 2);
10114 objsize = CALL_EXPR_ARG (exp, 3);
10115 break;
10117 case BUILT_IN_STRNCPY_CHK:
10118 case BUILT_IN_STPNCPY_CHK:
10119 srcstr = CALL_EXPR_ARG (exp, 1);
10120 size = CALL_EXPR_ARG (exp, 2);
10121 objsize = CALL_EXPR_ARG (exp, 3);
10122 break;
10124 case BUILT_IN_SNPRINTF_CHK:
10125 case BUILT_IN_VSNPRINTF_CHK:
10126 maxread = CALL_EXPR_ARG (exp, 1);
10127 objsize = CALL_EXPR_ARG (exp, 3);
10128 break;
10129 default:
10130 gcc_unreachable ();
10133 if (catstr && maxread)
10135 /* Check __strncat_chk. There is no way to determine the length
10136 of the string to which the source string is being appended so
10137 just warn when the length of the source string is not known. */
10138 check_strncat_sizes (exp, objsize);
10139 return;
10142 /* The destination argument is the first one for all built-ins above. */
10143 tree dst = CALL_EXPR_ARG (exp, 0);
10145 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10148 /* Emit warning if a buffer overflow is detected at compile time
10149 in __sprintf_chk/__vsprintf_chk calls. */
10151 static void
10152 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10154 tree size, len, fmt;
10155 const char *fmt_str;
10156 int nargs = call_expr_nargs (exp);
10158 /* Verify the required arguments in the original call. */
10160 if (nargs < 4)
10161 return;
10162 size = CALL_EXPR_ARG (exp, 2);
10163 fmt = CALL_EXPR_ARG (exp, 3);
10165 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10166 return;
10168 /* Check whether the format is a literal string constant. */
10169 fmt_str = c_getstr (fmt);
10170 if (fmt_str == NULL)
10171 return;
10173 if (!init_target_chars ())
10174 return;
10176 /* If the format doesn't contain % args or %%, we know its size. */
10177 if (strchr (fmt_str, target_percent) == 0)
10178 len = build_int_cstu (size_type_node, strlen (fmt_str));
10179 /* If the format is "%s" and first ... argument is a string literal,
10180 we know it too. */
10181 else if (fcode == BUILT_IN_SPRINTF_CHK
10182 && strcmp (fmt_str, target_percent_s) == 0)
10184 tree arg;
10186 if (nargs < 5)
10187 return;
10188 arg = CALL_EXPR_ARG (exp, 4);
10189 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10190 return;
10192 len = c_strlen (arg, 1);
10193 if (!len || ! tree_fits_uhwi_p (len))
10194 return;
10196 else
10197 return;
10199 /* Add one for the terminating nul. */
10200 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10202 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10203 /*maxread=*/NULL_TREE, len, size);
10206 /* Emit warning if a free is called with address of a variable. */
10208 static void
10209 maybe_emit_free_warning (tree exp)
10211 tree arg = CALL_EXPR_ARG (exp, 0);
10213 STRIP_NOPS (arg);
10214 if (TREE_CODE (arg) != ADDR_EXPR)
10215 return;
10217 arg = get_base_address (TREE_OPERAND (arg, 0));
10218 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10219 return;
10221 if (SSA_VAR_P (arg))
10222 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10223 "%Kattempt to free a non-heap object %qD", exp, arg);
10224 else
10225 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10226 "%Kattempt to free a non-heap object", exp);
10229 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10230 if possible. */
10232 static tree
10233 fold_builtin_object_size (tree ptr, tree ost)
10235 unsigned HOST_WIDE_INT bytes;
10236 int object_size_type;
10238 if (!validate_arg (ptr, POINTER_TYPE)
10239 || !validate_arg (ost, INTEGER_TYPE))
10240 return NULL_TREE;
10242 STRIP_NOPS (ost);
10244 if (TREE_CODE (ost) != INTEGER_CST
10245 || tree_int_cst_sgn (ost) < 0
10246 || compare_tree_int (ost, 3) > 0)
10247 return NULL_TREE;
10249 object_size_type = tree_to_shwi (ost);
10251 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10252 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10253 and (size_t) 0 for types 2 and 3. */
10254 if (TREE_SIDE_EFFECTS (ptr))
10255 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10257 if (TREE_CODE (ptr) == ADDR_EXPR)
10259 compute_builtin_object_size (ptr, object_size_type, &bytes);
10260 if (wi::fits_to_tree_p (bytes, size_type_node))
10261 return build_int_cstu (size_type_node, bytes);
10263 else if (TREE_CODE (ptr) == SSA_NAME)
10265 /* If object size is not known yet, delay folding until
10266 	 later.  Maybe subsequent passes will help determine it.  */
10268 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10269 && wi::fits_to_tree_p (bytes, size_type_node))
10270 return build_int_cstu (size_type_node, bytes);
10273 return NULL_TREE;
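/* A minimal sketch of the constant-folding cases above (kept under
   #if 0; the subobject query may only fold in later passes).  */
#if 0
static void
objsize_cases (void)
{
  char buf[64];
  __SIZE_TYPE__ a = __builtin_object_size (buf, 0);	 /* 64  */
  __SIZE_TYPE__ b = __builtin_object_size (buf + 10, 0); /* 54  */
  (void) a; (void) b;
}
#endif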
10276 /* Builtins with folding operations that operate on "..." arguments
10277 need special handling; we need to store the arguments in a convenient
10278 data structure before attempting any folding. Fortunately there are
10279 only a few builtins that fall into this category. FNDECL is the
10280 function, EXP is the CALL_EXPR for the call. */
10282 static tree
10283 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10285 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10286 tree ret = NULL_TREE;
10288 switch (fcode)
10290 case BUILT_IN_FPCLASSIFY:
10291 ret = fold_builtin_fpclassify (loc, args, nargs);
10292 break;
10294 default:
10295 break;
10297 if (ret)
10299 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10300 SET_EXPR_LOCATION (ret, loc);
10301 TREE_NO_WARNING (ret) = 1;
10302 return ret;
10304 return NULL_TREE;
10307 /* Initialize format string characters in the target charset. */
10309 bool
10310 init_target_chars (void)
10312 static bool init;
10313 if (!init)
10315 target_newline = lang_hooks.to_target_charset ('\n');
10316 target_percent = lang_hooks.to_target_charset ('%');
10317 target_c = lang_hooks.to_target_charset ('c');
10318 target_s = lang_hooks.to_target_charset ('s');
10319 if (target_newline == 0 || target_percent == 0 || target_c == 0
10320 || target_s == 0)
10321 return false;
10323 target_percent_c[0] = target_percent;
10324 target_percent_c[1] = target_c;
10325 target_percent_c[2] = '\0';
10327 target_percent_s[0] = target_percent;
10328 target_percent_s[1] = target_s;
10329 target_percent_s[2] = '\0';
10331 target_percent_s_newline[0] = target_percent;
10332 target_percent_s_newline[1] = target_s;
10333 target_percent_s_newline[2] = target_newline;
10334 target_percent_s_newline[3] = '\0';
10336 init = true;
10338 return true;
10341 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10342 and no overflow/underflow occurred. INEXACT is true if M was not
10343 exactly calculated. TYPE is the tree type for the result. This
10344    function assumes that the MPFR flags were cleared before M was
10345    calculated, so that any flag set by the calculation can be
10346    detected here.  Return NULL_TREE if any checks fail.  */
10348 static tree
10349 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10351 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10352 overflow/underflow occurred. If -frounding-math, proceed iff the
10353 result of calling FUNC was exact. */
10354 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10355 && (!flag_rounding_math || !inexact))
10357 REAL_VALUE_TYPE rr;
10359 real_from_mpfr (&rr, m, type, GMP_RNDN);
10360 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10361 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10362 	 but the mpfr_t is not, then we underflowed in the
10363 conversion. */
10364 if (real_isfinite (&rr)
10365 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10367 REAL_VALUE_TYPE rmode;
10369 real_convert (&rmode, TYPE_MODE (type), &rr);
10370 /* Proceed iff the specified mode can hold the value. */
10371 if (real_identical (&rmode, &rr))
10372 return build_real (type, rmode);
10375 return NULL_TREE;
10378 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10379 number and no overflow/underflow occurred. INEXACT is true if M
10380 was not exactly calculated. TYPE is the tree type for the result.
10381    This function assumes that the MPFR flags were cleared before M
10382    was calculated, so that any flag set by the calculation can be
10383    detected here.  Return NULL_TREE if any checks fail; if
10384    FORCE_CONVERT is true, bypass the checks.  */
10386 static tree
10387 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10389 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10390 overflow/underflow occurred. If -frounding-math, proceed iff the
10391 result of calling FUNC was exact. */
10392 if (force_convert
10393 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10394 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10395 && (!flag_rounding_math || !inexact)))
10397 REAL_VALUE_TYPE re, im;
10399 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10400 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10401 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10402 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10403 	 but the mpfr_t is not, then we underflowed in the
10404 conversion. */
10405 if (force_convert
10406 || (real_isfinite (&re) && real_isfinite (&im)
10407 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10408 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10410 REAL_VALUE_TYPE re_mode, im_mode;
10412 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10413 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10414 /* Proceed iff the specified mode can hold the value. */
10415 if (force_convert
10416 || (real_identical (&re_mode, &re)
10417 && real_identical (&im_mode, &im)))
10418 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10419 build_real (TREE_TYPE (type), im_mode));
10422 return NULL_TREE;
10425 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10426 the pointer *(ARG_QUO) and return the result. The type is taken
10427 from the type of ARG0 and is used for setting the precision of the
10428 calculation and results. */
10430 static tree
10431 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10433 tree const type = TREE_TYPE (arg0);
10434 tree result = NULL_TREE;
10436 STRIP_NOPS (arg0);
10437 STRIP_NOPS (arg1);
10439 /* To proceed, MPFR must exactly represent the target floating point
10440 format, which only happens when the target base equals two. */
10441 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10442 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10443 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10445 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10446 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10448 if (real_isfinite (ra0) && real_isfinite (ra1))
10450 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10451 const int prec = fmt->p;
10452 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10453 tree result_rem;
10454 long integer_quo;
10455 mpfr_t m0, m1;
10457 mpfr_inits2 (prec, m0, m1, NULL);
10458 mpfr_from_real (m0, ra0, GMP_RNDN);
10459 mpfr_from_real (m1, ra1, GMP_RNDN);
10460 mpfr_clear_flags ();
10461 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10462 /* Remquo is independent of the rounding mode, so pass
10463 inexact=0 to do_mpfr_ckconv(). */
10464 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10465 mpfr_clears (m0, m1, NULL);
10466 if (result_rem)
10468 /* MPFR calculates quo in the host's long so it may
10469 return more bits in quo than the target int can hold
10470 if sizeof(host long) > sizeof(target int). This can
10471 happen even for native compilers in LP64 mode. In
10472 		 these cases, reduce the quo value modulo the largest
10473 		 number that the target int can hold, leaving one
10474 		 bit for the sign.
10475 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10476 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10478 /* Dereference the quo pointer argument. */
10479 arg_quo = build_fold_indirect_ref (arg_quo);
10480 /* Proceed iff a valid pointer type was passed in. */
10481 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10483 /* Set the value. */
10484 tree result_quo
10485 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10486 build_int_cst (TREE_TYPE (arg_quo),
10487 integer_quo));
10488 TREE_SIDE_EFFECTS (result_quo) = 1;
10489 /* Combine the quo assignment with the rem. */
10490 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10491 result_quo, result_rem));
10496 return result;
10499 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10500 resulting value as a tree with type TYPE. The mpfr precision is
10501 set to the precision of TYPE. We assume that this mpfr function
10502 returns zero if the result could be calculated exactly within the
10503 requested precision. In addition, the integer pointer represented
10504 by ARG_SG will be dereferenced and set to the appropriate signgam
10505 (-1,1) value. */
10507 static tree
10508 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10510 tree result = NULL_TREE;
10512 STRIP_NOPS (arg);
10514 /* To proceed, MPFR must exactly represent the target floating point
10515 format, which only happens when the target base equals two. Also
10516 verify ARG is a constant and that ARG_SG is an int pointer. */
10517 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10518 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10519 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10520 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10522 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10524 /* In addition to NaN and Inf, the argument cannot be zero or a
10525 negative integer. */
10526 if (real_isfinite (ra)
10527 && ra->cl != rvc_zero
10528 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10530 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10531 const int prec = fmt->p;
10532 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10533 int inexact, sg;
10534 mpfr_t m;
10535 tree result_lg;
10537 mpfr_init2 (m, prec);
10538 mpfr_from_real (m, ra, GMP_RNDN);
10539 mpfr_clear_flags ();
10540 inexact = mpfr_lgamma (m, &sg, m, rnd);
10541 result_lg = do_mpfr_ckconv (m, type, inexact);
10542 mpfr_clear (m);
10543 if (result_lg)
10545 tree result_sg;
10547 /* Dereference the arg_sg pointer argument. */
10548 arg_sg = build_fold_indirect_ref (arg_sg);
10549 /* Assign the signgam value into *arg_sg. */
10550 result_sg = fold_build2 (MODIFY_EXPR,
10551 TREE_TYPE (arg_sg), arg_sg,
10552 build_int_cst (TREE_TYPE (arg_sg), sg));
10553 TREE_SIDE_EFFECTS (result_sg) = 1;
10554 /* Combine the signgam assignment with the lgamma result. */
10555 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10556 result_sg, result_lg));
10561 return result;
10564 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10565 mpc function FUNC on it and return the resulting value as a tree
10566 with type TYPE. The mpfr precision is set to the precision of
10567 TYPE. We assume that function FUNC returns zero if the result
10568 could be calculated exactly within the requested precision. If
10569 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10570 in the arguments and/or results. */
10572 tree
10573 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10574 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10576 tree result = NULL_TREE;
10578 STRIP_NOPS (arg0);
10579 STRIP_NOPS (arg1);
10581 /* To proceed, MPFR must exactly represent the target floating point
10582 format, which only happens when the target base equals two. */
10583 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10584 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10585 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10586 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10587 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10589 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10590 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10591 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10592 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10594 if (do_nonfinite
10595 || (real_isfinite (re0) && real_isfinite (im0)
10596 && real_isfinite (re1) && real_isfinite (im1)))
10598 const struct real_format *const fmt =
10599 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10600 const int prec = fmt->p;
10601 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10602 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10603 int inexact;
10604 mpc_t m0, m1;
10606 mpc_init2 (m0, prec);
10607 mpc_init2 (m1, prec);
10608 mpfr_from_real (mpc_realref (m0), re0, rnd);
10609 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10610 mpfr_from_real (mpc_realref (m1), re1, rnd);
10611 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10612 mpfr_clear_flags ();
10613 inexact = func (m0, m0, m1, crnd);
10614 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10615 mpc_clear (m0);
10616 mpc_clear (m1);
10620 return result;
10623 /* A wrapper function for builtin folding that prevents warnings for
10624 "statement without effect" and the like, caused by removing the
10625 call node earlier than the warning is generated. */
10627 tree
10628 fold_call_stmt (gcall *stmt, bool ignore)
10630 tree ret = NULL_TREE;
10631 tree fndecl = gimple_call_fndecl (stmt);
10632 location_t loc = gimple_location (stmt);
10633 if (fndecl
10634 && TREE_CODE (fndecl) == FUNCTION_DECL
10635 && DECL_BUILT_IN (fndecl)
10636 && !gimple_call_va_arg_pack_p (stmt))
10638 int nargs = gimple_call_num_args (stmt);
10639 tree *args = (nargs > 0
10640 ? gimple_call_arg_ptr (stmt, 0)
10641 : &error_mark_node);
10643 if (avoid_folding_inline_builtin (fndecl))
10644 return NULL_TREE;
10645 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10647 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10649 else
10651 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10652 if (ret)
10654 /* Propagate location information from original call to
10655 expansion of builtin. Otherwise things like
10656 maybe_emit_chk_warning, that operate on the expansion
10657 of a builtin, will use the wrong location information. */
10658 if (gimple_has_location (stmt))
10660 tree realret = ret;
10661 if (TREE_CODE (ret) == NOP_EXPR)
10662 realret = TREE_OPERAND (ret, 0);
10663 if (CAN_HAVE_LOCATION_P (realret)
10664 && !EXPR_HAS_LOCATION (realret))
10665 SET_EXPR_LOCATION (realret, loc);
10666 return realret;
10668 return ret;
10672 return NULL_TREE;
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
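
/* Illustrative sketch, not part of the original source: this function is
   reached when user code renames a recognized builtin at the assembler
   level, e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   After that, out-of-line calls to __builtin_ffs, and on targets where
   int is narrower than a word the libcall emitted for ffs_optab as well,
   resolve to the symbol my_ffs.  */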
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
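
/* Illustrative sketch, not part of the original source: cost models such
   as the inliner's are the typical consumers of these predicates.  Given
   a call statement STMT (hypothetical variable name), a caller might do:

     tree fndecl = gimple_call_fndecl (stmt);
     if (fndecl && is_inexpensive_builtin (fndecl))
       ...treat the call as nearly free when sizing the function...

   Note the default case above falls back to is_simple_builtin, so every
   "simple" builtin is also "inexpensive".  */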
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
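
/* Illustrative sketch, not part of the original source: string-builtin
   folders use this helper to extract a character argument, e.g. for a
   memchr-style call whose second argument ARG2 may be constant:

     char c;
     if (target_char_cst_p (arg2, &c))
       ...search the known string contents for C at compile time...

   The CHAR_TYPE_SIZE == HOST_BITS_PER_CHAR guard ensures target char
   values survive the cast to a host char unchanged.  */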
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
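
/* Illustrative sketch, not part of the original source: callers compare a
   requested size against this limit, which evaluates to the target's
   PTRDIFF_MAX, e.g.

     if (tree_int_cst_lt (max_object_size (), size))
       ...diagnose or refuse the oversized access...

   Half the address space is the natural ceiling because pointer
   subtraction within a larger object would overflow ptrdiff_t.  */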