/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    {
	      align = BITS_PER_UNIT;
	      known_alignment = false;
	    }
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
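
/* A worked example of the invariant above: if get_object_alignment_1
   reports *ALIGNP == 128 and *BITPOSP == 32, the address is known to sit
   32 bits past a 128-bit boundary, so the strongest claim we can make
   about the object itself is least_bit_hwi (32) == 32 bits, i.e.
   4-byte alignment.  */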

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
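
/* As an illustration (assuming a 2-byte wide character): the element
   sequence { 'a', 'b', 0 } is stored as the bytes "a\0b\0\0\0" on a
   little-endian host, so string_length (ptr, 2, 3) memcmp's successive
   2-byte elements against "\0\0" and returns 2.  */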

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	/* Return when an embedded null character is found.  */
	return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
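
/* For example, for the string constant "foobar" with a known byte offset
   of 2, ELTOFF is 2 and the scan starts at "obar", so c_strlen returns
   ssize_int (4).  With a variable offset into "foobar" the function
   instead returns the string length minus the offset, via the
   size_diffop_loc call above.  */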

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
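
/* A sketch of the byte placement (assuming a little-endian target where
   neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN is set): J == I, so
   c_readstr ("abcd", SImode) puts 'a' in the least significant byte and
   yields the constant 0x64636261.  Once a NUL is seen, CH stays zero,
   so any bytes past the terminator read as zeros.  */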

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
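
/* For instance, expanding __builtin_return_address (1) dereferences the
   dynamic chain once via DYNAMIC_CHAIN_ADDRESS and then reads the return
   address out of that frame, either through the target's RETURN_ADDR_RTX
   or, by default, from the word just past the frame address.  */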

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
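
/* The resulting buffer layout, as written above: word 0 holds the value
   of targetm.builtin_setjmp_frame_value (), word 1 holds the address of
   RECEIVER_LABEL, and everything from offset 2 * GET_MODE_SIZE (Pmode)
   onward is the machine-dependent stack save area filled in by
   emit_stack_save.  expand_builtin_longjmp below reads the words back
   in the same order.  */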

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

/* Return true if the argument iterator ITER has more arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
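
/* A typical use in the expanders in this file: a memcpy-like builtin
   checks its signature with
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)
   and gives up on expansion when the check fails; literal null pointers
   in positions covered by attribute nonnull also make the check fail.  */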

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
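
/* For example, __builtin_prefetch (p, 1, 3) requests a write prefetch
   with maximum temporal locality, and __builtin_prefetch (p) is
   equivalent to __builtin_prefetch (p, 0, 3), per the argument
   defaulting above.  */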

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
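
/* So the saved block produced above contains, in order: the incoming arg
   pointer at offset 0, the structure value address (when it is not passed
   as an "invisible" first argument), and then each argument register in
   apply_args_mode, padded to its natural alignment.  */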

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1724 /* Perform an untyped return. */
1726 static void
1727 expand_builtin_return (rtx result)
1729 int size, align, regno;
1730 machine_mode mode;
1731 rtx reg;
1732 rtx_insn *call_fusage = 0;
1734 result = convert_memory_address (Pmode, result);
1736 apply_result_size ();
1737 result = gen_rtx_MEM (BLKmode, result);
1739 if (targetm.have_untyped_return ())
1741 rtx vector = result_vector (0, result);
1742 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1743 emit_barrier ();
1744 return;
1747 /* Restore the return value and note that each value is used. */
1748 size = 0;
1749 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1750 if ((mode = apply_result_mode[regno]) != VOIDmode)
1752 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1753 if (size % align != 0)
1754 size = CEIL (size, align) * align;
1755 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1756 emit_move_insn (reg, adjust_address (result, mode, size));
1758 push_to_sequence (call_fusage);
1759 emit_use (reg);
1760 call_fusage = get_insns ();
1761 end_sequence ();
1762 size += GET_MODE_SIZE (mode);
1765 /* Put the USE insns before the return. */
1766 emit_insn (call_fusage);
1768 /* Return whatever values were restored by jumping directly to the end
1769 of the function. */
1770 expand_naked_return ();
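/* As an illustration (a hedged sketch; TARGET_FN and the 64-byte
   argument-block bound are placeholders, not part of this file), the
   two functions above back the GNU extensions __builtin_apply_args,
   __builtin_apply and __builtin_return, e.g. in a forwarding wrapper:

     void wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*) ()) TARGET_FN, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply expands the __builtin_apply call and
   expand_builtin_return expands the __builtin_return. */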
1773 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1775 static enum type_class
1776 type_to_class (tree type)
1778 switch (TREE_CODE (type))
1780 case VOID_TYPE: return void_type_class;
1781 case INTEGER_TYPE: return integer_type_class;
1782 case ENUMERAL_TYPE: return enumeral_type_class;
1783 case BOOLEAN_TYPE: return boolean_type_class;
1784 case POINTER_TYPE: return pointer_type_class;
1785 case REFERENCE_TYPE: return reference_type_class;
1786 case OFFSET_TYPE: return offset_type_class;
1787 case REAL_TYPE: return real_type_class;
1788 case COMPLEX_TYPE: return complex_type_class;
1789 case FUNCTION_TYPE: return function_type_class;
1790 case METHOD_TYPE: return method_type_class;
1791 case RECORD_TYPE: return record_type_class;
1792 case UNION_TYPE:
1793 case QUAL_UNION_TYPE: return union_type_class;
1794 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1795 ? string_type_class : array_type_class);
1796 case LANG_TYPE: return lang_type_class;
1797 default: return no_type_class;
1801 /* Expand a call EXP to __builtin_classify_type. */
1803 static rtx
1804 expand_builtin_classify_type (tree exp)
1806 if (call_expr_nargs (exp))
1807 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1808 return GEN_INT (no_type_class);
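/* For example (a sketch; the class values come from typeclass.h, and
   the argument undergoes the usual conversions):

     __builtin_classify_type (42)     evaluates to integer_type_class
     __builtin_classify_type (3.14)   evaluates to real_type_class
     __builtin_classify_type (&obj)   evaluates to pointer_type_class

   and, as the code above shows, a call without arguments folds to
   no_type_class. */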
1811 /* This helper macro, meant to be used in mathfn_built_in below,
1812 determines which among a set of three builtin math functions is
1813 appropriate for a given type mode. The `F' and `L' cases are
1814 automatically generated from the `double' case. */
1815 #define CASE_MATHFN(MATHFN) \
1816 CASE_CFN_##MATHFN: \
1817 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1818 fcodel = BUILT_IN_##MATHFN##L ; break;
1819 /* Similar to above, but appends _R after any F/L suffix. */
1820 #define CASE_MATHFN_REENT(MATHFN) \
1821 case CFN_BUILT_IN_##MATHFN##_R: \
1822 case CFN_BUILT_IN_##MATHFN##F_R: \
1823 case CFN_BUILT_IN_##MATHFN##L_R: \
1824 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1825 fcodel = BUILT_IN_##MATHFN##L_R ; break;
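/* As a concrete sketch (assuming the CASE_CFN_* macros from
   case-cfn-macros.h), CASE_MATHFN (SQRT) expands to roughly:

     case CFN_SQRT:
     case CFN_BUILT_IN_SQRT:
     case CFN_BUILT_IN_SQRTF:
     case CFN_BUILT_IN_SQRTL:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;  */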
1827 /* Return a function equivalent to FN but operating on floating-point
1828 values of type TYPE, or END_BUILTINS if no such function exists.
1829 This is purely an operation on function codes; it does not guarantee
1830 that the target actually has an implementation of the function. */
1832 static built_in_function
1833 mathfn_built_in_2 (tree type, combined_fn fn)
1835 built_in_function fcode, fcodef, fcodel;
1837 switch (fn)
1839 CASE_MATHFN (ACOS)
1840 CASE_MATHFN (ACOSH)
1841 CASE_MATHFN (ASIN)
1842 CASE_MATHFN (ASINH)
1843 CASE_MATHFN (ATAN)
1844 CASE_MATHFN (ATAN2)
1845 CASE_MATHFN (ATANH)
1846 CASE_MATHFN (CBRT)
1847 CASE_MATHFN (CEIL)
1848 CASE_MATHFN (CEXPI)
1849 CASE_MATHFN (COPYSIGN)
1850 CASE_MATHFN (COS)
1851 CASE_MATHFN (COSH)
1852 CASE_MATHFN (DREM)
1853 CASE_MATHFN (ERF)
1854 CASE_MATHFN (ERFC)
1855 CASE_MATHFN (EXP)
1856 CASE_MATHFN (EXP10)
1857 CASE_MATHFN (EXP2)
1858 CASE_MATHFN (EXPM1)
1859 CASE_MATHFN (FABS)
1860 CASE_MATHFN (FDIM)
1861 CASE_MATHFN (FLOOR)
1862 CASE_MATHFN (FMA)
1863 CASE_MATHFN (FMAX)
1864 CASE_MATHFN (FMIN)
1865 CASE_MATHFN (FMOD)
1866 CASE_MATHFN (FREXP)
1867 CASE_MATHFN (GAMMA)
1868 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1869 CASE_MATHFN (HUGE_VAL)
1870 CASE_MATHFN (HYPOT)
1871 CASE_MATHFN (ILOGB)
1872 CASE_MATHFN (ICEIL)
1873 CASE_MATHFN (IFLOOR)
1874 CASE_MATHFN (INF)
1875 CASE_MATHFN (IRINT)
1876 CASE_MATHFN (IROUND)
1877 CASE_MATHFN (ISINF)
1878 CASE_MATHFN (J0)
1879 CASE_MATHFN (J1)
1880 CASE_MATHFN (JN)
1881 CASE_MATHFN (LCEIL)
1882 CASE_MATHFN (LDEXP)
1883 CASE_MATHFN (LFLOOR)
1884 CASE_MATHFN (LGAMMA)
1885 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1886 CASE_MATHFN (LLCEIL)
1887 CASE_MATHFN (LLFLOOR)
1888 CASE_MATHFN (LLRINT)
1889 CASE_MATHFN (LLROUND)
1890 CASE_MATHFN (LOG)
1891 CASE_MATHFN (LOG10)
1892 CASE_MATHFN (LOG1P)
1893 CASE_MATHFN (LOG2)
1894 CASE_MATHFN (LOGB)
1895 CASE_MATHFN (LRINT)
1896 CASE_MATHFN (LROUND)
1897 CASE_MATHFN (MODF)
1898 CASE_MATHFN (NAN)
1899 CASE_MATHFN (NANS)
1900 CASE_MATHFN (NEARBYINT)
1901 CASE_MATHFN (NEXTAFTER)
1902 CASE_MATHFN (NEXTTOWARD)
1903 CASE_MATHFN (POW)
1904 CASE_MATHFN (POWI)
1905 CASE_MATHFN (POW10)
1906 CASE_MATHFN (REMAINDER)
1907 CASE_MATHFN (REMQUO)
1908 CASE_MATHFN (RINT)
1909 CASE_MATHFN (ROUND)
1910 CASE_MATHFN (SCALB)
1911 CASE_MATHFN (SCALBLN)
1912 CASE_MATHFN (SCALBN)
1913 CASE_MATHFN (SIGNBIT)
1914 CASE_MATHFN (SIGNIFICAND)
1915 CASE_MATHFN (SIN)
1916 CASE_MATHFN (SINCOS)
1917 CASE_MATHFN (SINH)
1918 CASE_MATHFN (SQRT)
1919 CASE_MATHFN (TAN)
1920 CASE_MATHFN (TANH)
1921 CASE_MATHFN (TGAMMA)
1922 CASE_MATHFN (TRUNC)
1923 CASE_MATHFN (Y0)
1924 CASE_MATHFN (Y1)
1925 CASE_MATHFN (YN)
1927 default:
1928 return END_BUILTINS;
1931 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1932 return fcode;
1933 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1934 return fcodef;
1935 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1936 return fcodel;
1937 else
1938 return END_BUILTINS;
1941 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1942 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1943 otherwise use the explicit declaration. If we can't do the conversion,
1944 return null. */
1946 static tree
1947 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1949 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1950 if (fcode2 == END_BUILTINS)
1951 return NULL_TREE;
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1956 return builtin_decl_explicit (fcode2);
1959 /* Like mathfn_built_in_1, but always use the implicit array. */
1961 tree
1962 mathfn_built_in (tree type, combined_fn fn)
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1967 /* Like mathfn_built_in_1, but take a built_in_function and
1968 always use the implicit array. */
1970 tree
1971 mathfn_built_in (tree type, enum built_in_function fn)
1973 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
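/* A usage sketch: a caller holding a float operand can ask for the
   float variant of a double builtin,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which yields the decl for sqrtf, or NULL_TREE when it is not
   implicitly available on the target. */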
1976 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1977 return its code, otherwise return IFN_LAST. Note that this function
1978 only tests whether the function is defined in internals.def, not whether
1979 it is actually available on the target. */
1981 internal_fn
1982 associated_internal_fn (tree fndecl)
1984 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1985 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1986 switch (DECL_FUNCTION_CODE (fndecl))
1988 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1989 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1990 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1991 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1992 #include "internal-fn.def"
1994 CASE_FLT_FN (BUILT_IN_POW10):
1995 return IFN_EXP10;
1997 CASE_FLT_FN (BUILT_IN_DREM):
1998 return IFN_REMAINDER;
2000 CASE_FLT_FN (BUILT_IN_SCALBN):
2001 CASE_FLT_FN (BUILT_IN_SCALBLN):
2002 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2003 return IFN_LDEXP;
2004 return IFN_LAST;
2006 default:
2007 return IFN_LAST;
2011 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2012 on the current target by a call to an internal function, return the
2013 code of that internal function, otherwise return IFN_LAST. The caller
2014 is responsible for ensuring that any side-effects of the built-in
2015 call are dealt with correctly. E.g. if CALL sets errno, the caller
2016 must decide that the errno result isn't needed or make it available
2017 in some other way. */
2019 internal_fn
2020 replacement_internal_fn (gcall *call)
2022 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2024 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2025 if (ifn != IFN_LAST)
2027 tree_pair types = direct_internal_fn_types (ifn, call);
2028 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2029 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2030 return ifn;
2033 return IFN_LAST;
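/* A hedged example: given a GIMPLE call such as

     _2 = __builtin_sqrt (x_1);

   compiled with -fno-math-errno so the errno side-effect is moot,
   replacement_internal_fn returns IFN_SQRT when the target implements
   the corresponding optab for the mode, and the call can then be
   rewritten as

     _2 = .SQRT (x_1);

   Otherwise it returns IFN_LAST and the ordinary call remains. */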
2036 /* Expand a call to the builtin ternary math functions (fma).
2037 Return NULL_RTX if a normal call should be emitted rather than expanding the
2038 function in-line. EXP is the expression that is a call to the builtin
2039 function; if convenient, the result should be placed in TARGET.
2040 SUBTARGET may be used as the target for computing one of EXP's
2041 operands. */
2043 static rtx
2044 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2046 optab builtin_optab;
2047 rtx op0, op1, op2, result;
2048 rtx_insn *insns;
2049 tree fndecl = get_callee_fndecl (exp);
2050 tree arg0, arg1, arg2;
2051 machine_mode mode;
2053 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2054 return NULL_RTX;
2056 arg0 = CALL_EXPR_ARG (exp, 0);
2057 arg1 = CALL_EXPR_ARG (exp, 1);
2058 arg2 = CALL_EXPR_ARG (exp, 2);
2060 switch (DECL_FUNCTION_CODE (fndecl))
2062 CASE_FLT_FN (BUILT_IN_FMA):
2063 builtin_optab = fma_optab; break;
2064 default:
2065 gcc_unreachable ();
2068 /* Make a suitable register to place result in. */
2069 mode = TYPE_MODE (TREE_TYPE (exp));
2071 /* Before working hard, check whether the instruction is available. */
2072 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2073 return NULL_RTX;
2075 result = gen_reg_rtx (mode);
2077 /* Always stabilize the argument list. */
2078 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2079 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2080 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2082 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2083 op1 = expand_normal (arg1);
2084 op2 = expand_normal (arg2);
2086 start_sequence ();
2088 /* Compute into RESULT.
2089 Set RESULT to wherever the result comes back. */
2090 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2091 result, 0);
2093 /* If we were unable to expand via the builtin, stop the sequence
2094 (without outputting the insns) and call the library function
2095 with the stabilized argument list. */
2096 if (result == 0)
2098 end_sequence ();
2099 return expand_call (exp, target, target == const0_rtx);
2102 /* Output the entire sequence. */
2103 insns = get_insns ();
2104 end_sequence ();
2105 emit_insn (insns);
2107 return result;
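/* An illustrative sketch: on a target whose fma_optab is implemented
   by a fused multiply-add insn,

     r = __builtin_fma (a, b, c);

   expands via expand_ternary_op into that single insn; when the optab
   is missing, the sequence is discarded and a normal call to fma is
   emitted instead. */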
2110 /* Expand a call to the builtin sin and cos math functions.
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2117 static rtx
2118 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2120 optab builtin_optab;
2121 rtx op0;
2122 rtx_insn *insns;
2123 tree fndecl = get_callee_fndecl (exp);
2124 machine_mode mode;
2125 tree arg;
2127 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2128 return NULL_RTX;
2130 arg = CALL_EXPR_ARG (exp, 0);
2132 switch (DECL_FUNCTION_CODE (fndecl))
2134 CASE_FLT_FN (BUILT_IN_SIN):
2135 CASE_FLT_FN (BUILT_IN_COS):
2136 builtin_optab = sincos_optab; break;
2137 default:
2138 gcc_unreachable ();
2141 /* Make a suitable register to place result in. */
2142 mode = TYPE_MODE (TREE_TYPE (exp));
2144 /* Check if the sincos insn is available; otherwise fall back
2145 to the sin or cos insn. */
2146 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2147 switch (DECL_FUNCTION_CODE (fndecl))
2149 CASE_FLT_FN (BUILT_IN_SIN):
2150 builtin_optab = sin_optab; break;
2151 CASE_FLT_FN (BUILT_IN_COS):
2152 builtin_optab = cos_optab; break;
2153 default:
2154 gcc_unreachable ();
2157 /* Before working hard, check whether the instruction is available. */
2158 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2160 rtx result = gen_reg_rtx (mode);
2162 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2163 need to expand the argument again. This way, we will not perform
2164 side-effects more than once. */
2165 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2167 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2169 start_sequence ();
2171 /* Compute into RESULT.
2172 Set RESULT to wherever the result comes back. */
2173 if (builtin_optab == sincos_optab)
2175 int ok;
2177 switch (DECL_FUNCTION_CODE (fndecl))
2179 CASE_FLT_FN (BUILT_IN_SIN):
2180 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2181 break;
2182 CASE_FLT_FN (BUILT_IN_COS):
2183 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2184 break;
2185 default:
2186 gcc_unreachable ();
2188 gcc_assert (ok);
2190 else
2191 result = expand_unop (mode, builtin_optab, op0, result, 0);
2193 if (result != 0)
2195 /* Output the entire sequence. */
2196 insns = get_insns ();
2197 end_sequence ();
2198 emit_insn (insns);
2199 return result;
2202 /* If we were unable to expand via the builtin, stop the sequence
2203 (without outputting the insns) and call the library function
2204 with the stabilized argument list. */
2205 end_sequence ();
2208 return expand_call (exp, target, target == const0_rtx);
2211 /* Given an interclass math builtin decl FNDECL and its argument ARG
2212 return an RTL instruction code that implements the functionality.
2213 If that isn't possible or available return CODE_FOR_nothing. */
2215 static enum insn_code
2216 interclass_mathfn_icode (tree arg, tree fndecl)
2218 bool errno_set = false;
2219 optab builtin_optab = unknown_optab;
2220 machine_mode mode;
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_ILOGB):
2225 errno_set = true; builtin_optab = ilogb_optab; break;
2226 CASE_FLT_FN (BUILT_IN_ISINF):
2227 builtin_optab = isinf_optab; break;
2228 case BUILT_IN_ISNORMAL:
2229 case BUILT_IN_ISFINITE:
2230 CASE_FLT_FN (BUILT_IN_FINITE):
2231 case BUILT_IN_FINITED32:
2232 case BUILT_IN_FINITED64:
2233 case BUILT_IN_FINITED128:
2234 case BUILT_IN_ISINFD32:
2235 case BUILT_IN_ISINFD64:
2236 case BUILT_IN_ISINFD128:
2237 /* These builtins have no optabs (yet). */
2238 break;
2239 default:
2240 gcc_unreachable ();
2243 /* There's no easy way to detect the case we need to set EDOM. */
2244 if (flag_errno_math && errno_set)
2245 return CODE_FOR_nothing;
2247 /* Optab mode depends on the mode of the input argument. */
2248 mode = TYPE_MODE (TREE_TYPE (arg));
2250 if (builtin_optab)
2251 return optab_handler (builtin_optab, mode);
2252 return CODE_FOR_nothing;
2255 /* Expand a call to one of the builtin math functions that operate on
2256 a floating-point argument and output an integer result (ilogb, isinf,
2257 isnan, etc.).
2258 Return 0 if a normal call should be emitted rather than expanding the
2259 function in-line. EXP is the expression that is a call to the builtin
2260 function; if convenient, the result should be placed in TARGET. */
2262 static rtx
2263 expand_builtin_interclass_mathfn (tree exp, rtx target)
2265 enum insn_code icode = CODE_FOR_nothing;
2266 rtx op0;
2267 tree fndecl = get_callee_fndecl (exp);
2268 machine_mode mode;
2269 tree arg;
2271 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2272 return NULL_RTX;
2274 arg = CALL_EXPR_ARG (exp, 0);
2275 icode = interclass_mathfn_icode (arg, fndecl);
2276 mode = TYPE_MODE (TREE_TYPE (arg));
2278 if (icode != CODE_FOR_nothing)
2280 struct expand_operand ops[1];
2281 rtx_insn *last = get_last_insn ();
2282 tree orig_arg = arg;
2284 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2285 need to expand the argument again. This way, we will not perform
2286 side-effects more than once. */
2287 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2289 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2291 if (mode != GET_MODE (op0))
2292 op0 = convert_to_mode (mode, op0, 0);
2294 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2295 if (maybe_legitimize_operands (icode, 0, 1, ops)
2296 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2297 return ops[0].value;
2299 delete_insns_since (last);
2300 CALL_EXPR_ARG (exp, 0) = orig_arg;
2303 return NULL_RTX;
2306 /* Expand a call to the builtin sincos math function.
2307 Return NULL_RTX if a normal call should be emitted rather than expanding the
2308 function in-line. EXP is the expression that is a call to the builtin
2309 function. */
2311 static rtx
2312 expand_builtin_sincos (tree exp)
2314 rtx op0, op1, op2, target1, target2;
2315 machine_mode mode;
2316 tree arg, sinp, cosp;
2317 int result;
2318 location_t loc = EXPR_LOCATION (exp);
2319 tree alias_type, alias_off;
2321 if (!validate_arglist (exp, REAL_TYPE,
2322 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2323 return NULL_RTX;
2325 arg = CALL_EXPR_ARG (exp, 0);
2326 sinp = CALL_EXPR_ARG (exp, 1);
2327 cosp = CALL_EXPR_ARG (exp, 2);
2329 /* Make a suitable register to place result in. */
2330 mode = TYPE_MODE (TREE_TYPE (arg));
2332 /* Check if sincos insn is available, otherwise emit the call. */
2333 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2334 return NULL_RTX;
2336 target1 = gen_reg_rtx (mode);
2337 target2 = gen_reg_rtx (mode);
2339 op0 = expand_normal (arg);
2340 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2341 alias_off = build_int_cst (alias_type, 0);
2342 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2343 sinp, alias_off));
2344 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2345 cosp, alias_off));
2347 /* Compute into target1 and target2.
2348 Set TARGET to wherever the result comes back. */
2349 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2350 gcc_assert (result);
2352 /* Move target1 and target2 to the memory locations indicated
2353 by op1 and op2. */
2354 emit_move_insn (op1, target1);
2355 emit_move_insn (op2, target2);
2357 return const0_rtx;
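/* A usage sketch, assuming the target provides a sincos insn for the
   argument's mode:

     double s, c;
     __builtin_sincos (x, &s, &c);

   expands to a single twoval insn computing both values, after which
   the two result registers are stored through the SINP and COSP
   pointers. */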
2360 /* Expand a call to the internal cexpi builtin to the sincos math function.
2361 EXP is the expression that is a call to the builtin function; if convenient,
2362 the result should be placed in TARGET. */
2364 static rtx
2365 expand_builtin_cexpi (tree exp, rtx target)
2367 tree fndecl = get_callee_fndecl (exp);
2368 tree arg, type;
2369 machine_mode mode;
2370 rtx op0, op1, op2;
2371 location_t loc = EXPR_LOCATION (exp);
2373 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2374 return NULL_RTX;
2376 arg = CALL_EXPR_ARG (exp, 0);
2377 type = TREE_TYPE (arg);
2378 mode = TYPE_MODE (TREE_TYPE (arg));
2380 /* Try expanding via a sincos optab; fall back to emitting a libcall
2381 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2382 is only generated from sincos or cexp, or when we have either of them. */
2383 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2385 op1 = gen_reg_rtx (mode);
2386 op2 = gen_reg_rtx (mode);
2388 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2390 /* Compute into op1 and op2. */
2391 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2393 else if (targetm.libc_has_function (function_sincos))
2395 tree call, fn = NULL_TREE;
2396 tree top1, top2;
2397 rtx op1a, op2a;
2399 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2400 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2401 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2402 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2403 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2404 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2405 else
2406 gcc_unreachable ();
2408 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2409 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2410 op1a = copy_addr_to_reg (XEXP (op1, 0));
2411 op2a = copy_addr_to_reg (XEXP (op2, 0));
2412 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2413 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2415 /* Make sure not to fold the sincos call again. */
2416 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2417 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2418 call, 3, arg, top1, top2));
2420 else
2422 tree call, fn = NULL_TREE, narg;
2423 tree ctype = build_complex_type (type);
2425 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2426 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2427 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2428 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2429 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2430 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2431 else
2432 gcc_unreachable ();
2434 /* If we don't have a decl for cexp create one. This is the
2435 friendliest fallback if the user calls __builtin_cexpi
2436 without full target C99 function support. */
2437 if (fn == NULL_TREE)
2439 tree fntype;
2440 const char *name = NULL;
2442 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2443 name = "cexpf";
2444 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2445 name = "cexp";
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2447 name = "cexpl";
2449 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2450 fn = build_fn_decl (name, fntype);
2453 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2454 build_real (type, dconst0), arg);
2456 /* Make sure not to fold the cexp call again. */
2457 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2458 return expand_expr (build_call_nary (ctype, call, 1, narg),
2459 target, VOIDmode, EXPAND_NORMAL);
2462 /* Now build the complex return value. */
2463 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2464 make_tree (TREE_TYPE (arg), op2),
2465 make_tree (TREE_TYPE (arg), op1)),
2466 target, VOIDmode, EXPAND_NORMAL);
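/* An illustrative sketch of the three strategies above for
   __builtin_cexpi (x), which computes cos (x) + i*sin (x): with a
   sincos insn both parts land directly in registers; with a sincos
   libc function the call becomes roughly

     sincos (x, &sin_tmp, &cos_tmp);

   (sin_tmp and cos_tmp being the temporaries assigned above); and
   otherwise the call is rewritten as cexp of the complex value
   0 + x*i, built with the COMPLEX_EXPR shown. */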
2469 /* Conveniently construct a function call expression. FNDECL names the
2470 function to be called, N is the number of arguments, and the "..."
2471 parameters are the argument expressions. Unlike build_call_expr
2472 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2474 static tree
2475 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2477 va_list ap;
2478 tree fntype = TREE_TYPE (fndecl);
2479 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2481 va_start (ap, n);
2482 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2483 va_end (ap);
2484 SET_EXPR_LOCATION (fn, loc);
2485 return fn;
2488 /* Expand a call to one of the builtin rounding functions gcc defines
2489 as an extension (lfloor and lceil). As these are gcc extensions we
2490 do not need to worry about setting errno to EDOM.
2491 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2492 EXP is the expression that is a call to the builtin function;
2493 if convenient, the result should be placed in TARGET. */
2495 static rtx
2496 expand_builtin_int_roundingfn (tree exp, rtx target)
2498 convert_optab builtin_optab;
2499 rtx op0, tmp;
2500 rtx_insn *insns;
2501 tree fndecl = get_callee_fndecl (exp);
2502 enum built_in_function fallback_fn;
2503 tree fallback_fndecl;
2504 machine_mode mode;
2505 tree arg;
2507 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2508 gcc_unreachable ();
2510 arg = CALL_EXPR_ARG (exp, 0);
2512 switch (DECL_FUNCTION_CODE (fndecl))
2514 CASE_FLT_FN (BUILT_IN_ICEIL):
2515 CASE_FLT_FN (BUILT_IN_LCEIL):
2516 CASE_FLT_FN (BUILT_IN_LLCEIL):
2517 builtin_optab = lceil_optab;
2518 fallback_fn = BUILT_IN_CEIL;
2519 break;
2521 CASE_FLT_FN (BUILT_IN_IFLOOR):
2522 CASE_FLT_FN (BUILT_IN_LFLOOR):
2523 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2524 builtin_optab = lfloor_optab;
2525 fallback_fn = BUILT_IN_FLOOR;
2526 break;
2528 default:
2529 gcc_unreachable ();
2532 /* Make a suitable register to place result in. */
2533 mode = TYPE_MODE (TREE_TYPE (exp));
2535 target = gen_reg_rtx (mode);
2537 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2538 need to expand the argument again. This way, we will not perform
2539 side-effects more than once. */
2540 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2542 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2544 start_sequence ();
2546 /* Compute into TARGET. */
2547 if (expand_sfix_optab (target, op0, builtin_optab))
2549 /* Output the entire sequence. */
2550 insns = get_insns ();
2551 end_sequence ();
2552 emit_insn (insns);
2553 return target;
2556 /* If we were unable to expand via the builtin, stop the sequence
2557 (without outputting the insns). */
2558 end_sequence ();
2560 /* Fall back to floating point rounding optab. */
2561 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2563 /* For non-C99 targets we may end up without a fallback fndecl here
2564 if the user called __builtin_lfloor directly. In this case emit
2565 a call to the floor/ceil variants nevertheless. This should result
2566 in the best user experience for targets without full C99 support. */
2567 if (fallback_fndecl == NULL_TREE)
2569 tree fntype;
2570 const char *name = NULL;
2572 switch (DECL_FUNCTION_CODE (fndecl))
2574 case BUILT_IN_ICEIL:
2575 case BUILT_IN_LCEIL:
2576 case BUILT_IN_LLCEIL:
2577 name = "ceil";
2578 break;
2579 case BUILT_IN_ICEILF:
2580 case BUILT_IN_LCEILF:
2581 case BUILT_IN_LLCEILF:
2582 name = "ceilf";
2583 break;
2584 case BUILT_IN_ICEILL:
2585 case BUILT_IN_LCEILL:
2586 case BUILT_IN_LLCEILL:
2587 name = "ceill";
2588 break;
2589 case BUILT_IN_IFLOOR:
2590 case BUILT_IN_LFLOOR:
2591 case BUILT_IN_LLFLOOR:
2592 name = "floor";
2593 break;
2594 case BUILT_IN_IFLOORF:
2595 case BUILT_IN_LFLOORF:
2596 case BUILT_IN_LLFLOORF:
2597 name = "floorf";
2598 break;
2599 case BUILT_IN_IFLOORL:
2600 case BUILT_IN_LFLOORL:
2601 case BUILT_IN_LLFLOORL:
2602 name = "floorl";
2603 break;
2604 default:
2605 gcc_unreachable ();
2608 fntype = build_function_type_list (TREE_TYPE (arg),
2609 TREE_TYPE (arg), NULL_TREE);
2610 fallback_fndecl = build_fn_decl (name, fntype);
2613 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2615 tmp = expand_normal (exp);
2616 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2618 /* Truncate the result of floating point optab to integer
2619 via expand_fix (). */
2620 target = gen_reg_rtx (mode);
2621 expand_fix (target, tmp, 0);
2623 return target;
2626 /* Expand a call to one of the builtin math functions doing integer
2627 conversion (lrint).
2628 Return 0 if a normal call should be emitted rather than expanding the
2629 function in-line. EXP is the expression that is a call to the builtin
2630 function; if convenient, the result should be placed in TARGET. */
2632 static rtx
2633 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2635 convert_optab builtin_optab;
2636 rtx op0;
2637 rtx_insn *insns;
2638 tree fndecl = get_callee_fndecl (exp);
2639 tree arg;
2640 machine_mode mode;
2641 enum built_in_function fallback_fn = BUILT_IN_NONE;
2643 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2644 gcc_unreachable ();
2646 arg = CALL_EXPR_ARG (exp, 0);
2648 switch (DECL_FUNCTION_CODE (fndecl))
2650 CASE_FLT_FN (BUILT_IN_IRINT):
2651 fallback_fn = BUILT_IN_LRINT;
2652 gcc_fallthrough ();
2653 CASE_FLT_FN (BUILT_IN_LRINT):
2654 CASE_FLT_FN (BUILT_IN_LLRINT):
2655 builtin_optab = lrint_optab;
2656 break;
2658 CASE_FLT_FN (BUILT_IN_IROUND):
2659 fallback_fn = BUILT_IN_LROUND;
2660 gcc_fallthrough ();
2661 CASE_FLT_FN (BUILT_IN_LROUND):
2662 CASE_FLT_FN (BUILT_IN_LLROUND):
2663 builtin_optab = lround_optab;
2664 break;
2666 default:
2667 gcc_unreachable ();
2670 /* There's no easy way to detect the case we need to set EDOM. */
2671 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2672 return NULL_RTX;
2674 /* Make a suitable register to place result in. */
2675 mode = TYPE_MODE (TREE_TYPE (exp));
2677 /* When errno does not need to be maintained, try to expand in-line. */
2678 if (!flag_errno_math)
2680 rtx result = gen_reg_rtx (mode);
2682 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2683 need to expand the argument again. This way, we will not perform
2684 side-effects more than once. */
2685 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2687 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2689 start_sequence ();
2691 if (expand_sfix_optab (result, op0, builtin_optab))
2693 /* Output the entire sequence. */
2694 insns = get_insns ();
2695 end_sequence ();
2696 emit_insn (insns);
2697 return result;
2700 /* If we were unable to expand via the builtin, stop the sequence
2701 (without outputting the insns) and call the library function
2702 with the stabilized argument list. */
2703 end_sequence ();
2706 if (fallback_fn != BUILT_IN_NONE)
2708 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2709 targets, (int) round (x) should never be transformed into
2710 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2711 a call to lround in the hope that the target provides at least some
2712 C99 functions. This should result in the best user experience
2713 for targets without full C99 support. */
2714 tree fallback_fndecl = mathfn_built_in_1
2715 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2717 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2718 fallback_fndecl, 1, arg);
2720 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2721 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2722 return convert_to_mode (mode, target, 0);
2725 return expand_call (exp, target, target == const0_rtx);
2728 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2729 a normal call should be emitted rather than expanding the function
2730 in-line. EXP is the expression that is a call to the builtin
2731 function; if convenient, the result should be placed in TARGET. */
2733 static rtx
2734 expand_builtin_powi (tree exp, rtx target)
2736 tree arg0, arg1;
2737 rtx op0, op1;
2738 machine_mode mode;
2739 machine_mode mode2;
2741 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2742 return NULL_RTX;
2744 arg0 = CALL_EXPR_ARG (exp, 0);
2745 arg1 = CALL_EXPR_ARG (exp, 1);
2746 mode = TYPE_MODE (TREE_TYPE (exp));
2748 /* Emit a libcall to libgcc. */
2750 /* Mode of the 2nd argument must match that of an int. */
2751 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2753 if (target == NULL_RTX)
2754 target = gen_reg_rtx (mode);
2756 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2757 if (GET_MODE (op0) != mode)
2758 op0 = convert_to_mode (mode, op0, 0);
2759 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2760 if (GET_MODE (op1) != mode2)
2761 op1 = convert_to_mode (mode2, op1, 0);
2763 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2764 target, LCT_CONST, mode, 2,
2765 op0, mode, op1, mode2);
2767 return target;
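/* The libcall emitted above resolves to one of libgcc's __powi
   helpers; as a hedged sketch, for double it behaves roughly as

     r = __builtin_powi (x, n);      becomes      r = __powidf2 (x, n);

   with the exponent first converted to the mode of an int. */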
2770 /* Expand expression EXP which is a call to the strlen builtin. Return
2771 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2772 try to get the result in TARGET, if convenient. */
2774 static rtx
2775 expand_builtin_strlen (tree exp, rtx target,
2776 machine_mode target_mode)
2778 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2779 return NULL_RTX;
2780 else
2782 struct expand_operand ops[4];
2783 rtx pat;
2784 tree len;
2785 tree src = CALL_EXPR_ARG (exp, 0);
2786 rtx src_reg;
2787 rtx_insn *before_strlen;
2788 machine_mode insn_mode = target_mode;
2789 enum insn_code icode = CODE_FOR_nothing;
2790 unsigned int align;
2792 /* If the length can be computed at compile-time, return it. */
2793 len = c_strlen (src, 0);
2794 if (len)
2795 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2797 /* If the length can be computed at compile-time and is a constant
2798 integer, but there are side-effects in src, evaluate
2799 src for side-effects, then return len.
2800 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2801 can be optimized into: i++; x = 3; */
2802 len = c_strlen (src, 1);
2803 if (len && TREE_CODE (len) == INTEGER_CST)
2805 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2806 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2809 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2811 /* If SRC is not a pointer type, don't do this operation inline. */
2812 if (align == 0)
2813 return NULL_RTX;
2815 /* Bail out if we can't compute strlen in the right mode. */
2816 while (insn_mode != VOIDmode)
2818 icode = optab_handler (strlen_optab, insn_mode);
2819 if (icode != CODE_FOR_nothing)
2820 break;
2822 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2824 if (insn_mode == VOIDmode)
2825 return NULL_RTX;
2827 /* Make a place to hold the source address. We will not expand
2828 the actual source until we are sure that the expansion will
2829 not fail -- there are trees that cannot be expanded twice. */
2830 src_reg = gen_reg_rtx (Pmode);
2832 /* Mark the beginning of the strlen sequence so we can emit the
2833 source operand later. */
2834 before_strlen = get_last_insn ();
2836 create_output_operand (&ops[0], target, insn_mode);
2837 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2838 create_integer_operand (&ops[2], 0);
2839 create_integer_operand (&ops[3], align);
2840 if (!maybe_expand_insn (icode, 4, ops))
2841 return NULL_RTX;
2843 /* Now that we are assured of success, expand the source. */
2844 start_sequence ();
2845 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2846 if (pat != src_reg)
2848 #ifdef POINTERS_EXTEND_UNSIGNED
2849 if (GET_MODE (pat) != Pmode)
2850 pat = convert_to_mode (Pmode, pat,
2851 POINTERS_EXTEND_UNSIGNED);
2852 #endif
2853 emit_move_insn (src_reg, pat);
2855 pat = get_insns ();
2856 end_sequence ();
2858 if (before_strlen)
2859 emit_insn_after (pat, before_strlen);
2860 else
2861 emit_insn_before (pat, get_insns ());
2863 /* Return the value in the proper mode for this function. */
2864 if (GET_MODE (ops[0].value) == target_mode)
2865 target = ops[0].value;
2866 else if (target != 0)
2867 convert_move (target, ops[0].value, 0);
2868 else
2869 target = convert_to_mode (target_mode, ops[0].value, 0);
2871 return target;
2875 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2876 bytes from constant string DATA + OFFSET and return it as target
2877 constant. */
2879 static rtx
2880 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2881 machine_mode mode)
2883 const char *str = (const char *) data;
2885 gcc_assert (offset >= 0
2886 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2887 <= strlen (str) + 1));
2889 return c_readstr (str + offset, mode);
2892 /* LEN specifies the length of the block of the memcpy/memset operation.
2893 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2894 In some cases we can make a very likely guess at the maximum size;
2895 we then set it in PROBABLE_MAX_SIZE. */
2897 static void
2898 determine_block_size (tree len, rtx len_rtx,
2899 unsigned HOST_WIDE_INT *min_size,
2900 unsigned HOST_WIDE_INT *max_size,
2901 unsigned HOST_WIDE_INT *probable_max_size)
2903 if (CONST_INT_P (len_rtx))
2905 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2906 return;
2908 else
2910 wide_int min, max;
2911 enum value_range_type range_type = VR_UNDEFINED;
2913 /* Determine bounds from the type. */
2914 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2915 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2916 else
2917 *min_size = 0;
2918 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2919 *probable_max_size = *max_size
2920 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2921 else
2922 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2924 if (TREE_CODE (len) == SSA_NAME)
2925 range_type = get_range_info (len, &min, &max);
2926 if (range_type == VR_RANGE)
2928 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2929 *min_size = min.to_uhwi ();
2930 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2931 *probable_max_size = *max_size = max.to_uhwi ();
2933 else if (range_type == VR_ANTI_RANGE)
2935 /* An anti range 0...N lets us determine the minimal size as N+1. */
2936 if (min == 0)
2938 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2939 *min_size = max.to_uhwi () + 1;
2941 /* Code like
2943 int n;
2944 if (n < 100)
2945 memcpy (a, b, n)
2947 Produces an anti range allowing negative values of N. We still
2948 can use the information and make a guess that N is not negative. */
2950 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2951 *probable_max_size = min.to_uhwi () - 1;
2954 gcc_checking_assert (*max_size <=
2955 (unsigned HOST_WIDE_INT)
2956 GET_MODE_MASK (GET_MODE (len_rtx)));
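/* A worked example (hedged; A and B are placeholder buffers):

     void f (unsigned int n)
     {
       if (n < 100)
         memcpy (A, B, n);
     }

   gives the SSA name for N a VR_RANGE of [0, 99], so *MIN_SIZE becomes
   0 while *MAX_SIZE and *PROBABLE_MAX_SIZE become 99. With a signed N
   tested the same way, VRP instead produces the anti range handled
   above, and we only guess that N is not negative. */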
2959 /* Helper function to do the actual work for expand_builtin_memcpy. */
2961 static rtx
2962 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2964 const char *src_str;
2965 unsigned int src_align = get_pointer_alignment (src);
2966 unsigned int dest_align = get_pointer_alignment (dest);
2967 rtx dest_mem, src_mem, dest_addr, len_rtx;
2968 HOST_WIDE_INT expected_size = -1;
2969 unsigned int expected_align = 0;
2970 unsigned HOST_WIDE_INT min_size;
2971 unsigned HOST_WIDE_INT max_size;
2972 unsigned HOST_WIDE_INT probable_max_size;
2974 /* If DEST is not a pointer type, call the normal function. */
2975 if (dest_align == 0)
2976 return NULL_RTX;
2978 /* If SRC is not a pointer type, don't do this
2979 operation in-line. */
2980 if (src_align == 0)
2981 return NULL_RTX;
2983 if (currently_expanding_gimple_stmt)
2984 stringop_block_profile (currently_expanding_gimple_stmt,
2985 &expected_align, &expected_size);
2987 if (expected_align < dest_align)
2988 expected_align = dest_align;
2989 dest_mem = get_memory_rtx (dest, len);
2990 set_mem_align (dest_mem, dest_align);
2991 len_rtx = expand_normal (len);
2992 determine_block_size (len, len_rtx, &min_size, &max_size,
2993 &probable_max_size);
2994 src_str = c_getstr (src);
2996 /* If SRC is a string constant and block move would be done
2997 by pieces, we can avoid loading the string from memory
2998 and only store the computed constants. */
2999 if (src_str
3000 && CONST_INT_P (len_rtx)
3001 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3002 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3003 CONST_CAST (char *, src_str),
3004 dest_align, false))
3006 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3007 builtin_memcpy_read_str,
3008 CONST_CAST (char *, src_str),
3009 dest_align, false, 0);
3010 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3011 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3012 return dest_mem;
3015 src_mem = get_memory_rtx (src, len);
3016 set_mem_align (src_mem, src_align);
3018 /* Copy word part most expediently. */
3019 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3020 CALL_EXPR_TAILCALL (exp)
3021 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3022 expected_align, expected_size,
3023 min_size, max_size, probable_max_size);
3025 if (dest_addr == 0)
3027 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3028 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3031 return dest_addr;
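/* For instance (a sketch), memcpy (buf, "abc", 4) with a sufficiently
   aligned BUF takes the store_by_pieces path above: the four source
   bytes are read via builtin_memcpy_read_str and emitted as immediate
   stores, so the string constant itself is never loaded at run time. */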
3034 /* Try to verify that the sizes and lengths of the arguments to a string
3035 manipulation function given by EXP are within valid bounds and that
3036 the operation does not lead to buffer overflow. Arguments other than
3037 EXP may be null. When non-null, the arguments have the following
3038 meaning:
3039 SIZE is the user-supplied size argument to the function (such as in
3040 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3041 number of bytes to write.
3042 MAXLEN is the user-supplied bound on the length of the source sequence
3043 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3044 of bytes to write.
3045 STR is the source string (such as in strcpy(d, s)) when the expression
3046 EXP is a string function call (as opposed to a memory call like memcpy).
3047 As an exception, STR can also be an integer denoting the precomputed
3048 length of the source string.
3049 OBJSIZE is the size of the destination object specified by the last
3050 argument to the _chk builtins, typically resulting from the expansion
3051 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3052 OBJSIZE)).
3054 When SIZE is null, LEN is checked to verify that it doesn't exceed
3055 SIZE_MAX.
3057 If the call is successfully verified as safe from buffer overflow
3058 the function returns true, otherwise false. */
3060 static bool
3061 check_sizes (int opt, tree exp, tree size, tree maxlen, tree str, tree objsize)
3063 /* The size of the largest object is half the address space, or
3064 SSIZE_MAX. (This is way too permissive.) */
3065 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3067 tree slen = NULL_TREE;
3069 /* Set to true when the exact number of bytes written by a string
3070 function like strcpy is not known and the only thing that is
3071 known is that it must be at least one (for the terminating nul). */
3072 bool at_least_one = false;
3073 if (str)
3075 /* STR is normally a pointer to string but as a special case
3076 it can be an integer denoting the length of a string. */
3077 if (POINTER_TYPE_P (TREE_TYPE (str)))
3079 /* Try to determine the range of lengths the source string
3080 refers to. If it can be determined, add one to it for
3081 the terminating nul. Otherwise, set it to one for
3082 the same reason. */
3083 tree lenrange[2];
3084 get_range_strlen (str, lenrange);
3085 if (lenrange[0])
3086 slen = fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3087 size_one_node);
3088 else
3090 at_least_one = true;
3091 slen = size_one_node;
3094 else
3095 slen = str;
3098 if (!size && !maxlen)
3100 /* When the only available piece of data is the object size
3101 there is nothing to do. */
3102 if (!slen)
3103 return true;
3105 /* Otherwise, when the length of the source sequence is known
3106 (as with strlen), set SIZE to it. */
3107 size = slen;
3110 if (!objsize)
3111 objsize = maxobjsize;
3113 /* The SIZE is exact if it's non-null, constant, and in range of
3114 unsigned HOST_WIDE_INT. */
3115 bool exactsize = size && tree_fits_uhwi_p (size);
3117 tree range[2] = { NULL_TREE, NULL_TREE };
3118 if (size)
3119 get_size_range (size, range);
3121 /* First check the number of bytes to be written against the maximum
3122 object size. */
3123 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3125 location_t loc = tree_nonartificial_location (exp);
3127 if (range[0] == range[1])
3128 warning_at (loc, opt,
3129 "%K%qD: specified size %wu "
3130 "exceeds maximum object size %wu",
3131 exp, get_callee_fndecl (exp),
3132 tree_to_uhwi (range[0]),
3133 tree_to_uhwi (maxobjsize));
3134 else
3135 warning_at (loc, opt,
3136 "%K%qD: specified size between %wu and %wu "
3137 "exceeds maximum object size %wu",
3138 exp, get_callee_fndecl (exp),
3139 tree_to_uhwi (range[0]),
3140 tree_to_uhwi (range[1]),
3141 tree_to_uhwi (maxobjsize));
3142 return false;
3145 /* Next check the number of bytes to be written against the destination
3146 object size. */
3147 if (range[0] || !exactsize || integer_all_onesp (size))
3149 if (range[0]
3150 && ((tree_fits_uhwi_p (objsize)
3151 && tree_int_cst_lt (objsize, range[0]))
3152 || (tree_fits_uhwi_p (size)
3153 && tree_int_cst_lt (size, range[0]))))
3155 unsigned HOST_WIDE_INT uwir0 = tree_to_uhwi (range[0]);
3157 location_t loc = tree_nonartificial_location (exp);
3159 if (at_least_one)
3160 warning_at (loc, opt,
3161 "%K%qD: writing at least %wu byte into a region "
3162 "of size %wu overflows the destination",
3163 exp, get_callee_fndecl (exp), uwir0,
3164 tree_to_uhwi (objsize));
3165 else if (range[0] == range[1])
3166 warning_at (loc, opt,
3167 (uwir0 == 1
3168 ? G_("%K%qD: writing %wu byte into a region "
3169 "of size %wu overflows the destination")
3170 : G_("%K%qD: writing %wu bytes into a region "
3171 "of size %wu overflows the destination")),
3172 exp, get_callee_fndecl (exp), uwir0,
3173 tree_to_uhwi (objsize));
3174 else
3175 warning_at (loc, opt,
3176 "%K%qD: writing between %wu and %wu bytes "
3177 "into a region of size %wu overflows "
3178 "the destination",
3179 exp, get_callee_fndecl (exp), uwir0,
3180 tree_to_uhwi (range[1]), tree_to_uhwi (objsize));
3182 /* Return false when an overflow has been detected. */
3183 return false;
3187 /* Check the maximum length of the source sequence against the size
3188 of the destination object if known, or against the maximum size
3189 of an object. */
3190 if (maxlen)
3192 get_size_range (maxlen, range);
3194 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3196 location_t loc = tree_nonartificial_location (exp);
3198 if (tree_int_cst_lt (maxobjsize, range[0]))
3200 /* Warn about crazy big sizes first since that's more
3201 likely to be meaningful than saying that the bound
3202 is greater than the object size if both are big. */
3203 if (range[0] == range[1])
3204 warning_at (loc, opt,
3205 "%K%qD: specified bound %wu "
3206 "exceeds maximum object size %wu",
3207 exp, get_callee_fndecl (exp),
3208 tree_to_uhwi (range[0]),
3209 tree_to_uhwi (maxobjsize));
3210 else
3211 warning_at (loc, opt,
3212 "%K%qD: specified bound between %wu and %wu "
3213 " exceeds maximum object size %wu",
3214 exp, get_callee_fndecl (exp),
3215 tree_to_uhwi (range[0]),
3216 tree_to_uhwi (range[1]),
3217 tree_to_uhwi (maxobjsize));
3219 return false;
3222 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3224 if (range[0] == range[1])
3225 warning_at (loc, opt,
3226 "%K%qD: specified bound %wu "
3227 "exceeds the size %wu of the destination",
3228 exp, get_callee_fndecl (exp),
3229 tree_to_uhwi (range[0]),
3230 tree_to_uhwi (objsize));
3231 else
3232 warning_at (loc, opt,
3233 "%K%qD: specified bound between %wu and %wu "
3234 " exceeds the size %wu of the destination",
3235 exp, get_callee_fndecl (exp),
3236 tree_to_uhwi (range[0]),
3237 tree_to_uhwi (range[1]),
3238 tree_to_uhwi (objsize));
3239 return false;
3244 return true;
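/* A hedged example of the diagnostics above, with -Wstringop-overflow
   enabled:

     char d[3];
     memcpy (d, s, 5);

   has a constant SIZE of 5 and an OBJSIZE of 3, so the second check
   fires with, roughly, "writing 5 bytes into a region of size 3
   overflows the destination" and the function returns false. */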
3247 /* Helper to compute the size of the object referenced by the DEST
3248 expression, which must be of pointer type, using Object Size type
3249 OSTYPE (only the least significant 2 bits are used). Return
3250 the size of the object if successful or NULL when the size cannot
3251 be determined. */
3253 static inline tree
3254 compute_dest_size (tree dest, int ostype)
3256 unsigned HOST_WIDE_INT size;
3257 if (compute_builtin_object_size (dest, ostype & 3, &size))
3258 return build_int_cst (sizetype, size);
3260 return NULL_TREE;
3263 /* Helper to determine and check the sizes of the source and the destination
3264 of __builtin_{bzero,memcpy,memset} calls. Use Object Size type-0
3265 regardless of the OPT_Wstringop_overflow_ setting. Returns true on success
3266 (no overflow or invalid sizes), false otherwise. */
3268 static bool
3269 check_memop_sizes (tree exp, tree dest, tree size)
3271 if (!warn_stringop_overflow)
3272 return true;
3274 /* For functions like memset and memcpy that operate on raw memory
3275 try to determine the size of the largest destination object using
3276 type-0 Object Size regardless of the object size type specified
3277 by the option. */
3278 tree objsize = compute_dest_size (dest, 0);
3280 return check_sizes (OPT_Wstringop_overflow_, exp,
3281 size, /*maxlen=*/NULL_TREE, /*str=*/NULL_TREE, objsize);
3284 /* Expand a call EXP to the memcpy builtin.
3285 Return NULL_RTX if we failed; the caller should emit a normal call,
3286 otherwise try to get the result in TARGET, if convenient (and in
3287 mode MODE if that's convenient). */
3289 static rtx
3290 expand_builtin_memcpy (tree exp, rtx target)
3292 if (!validate_arglist (exp,
3293 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3294 return NULL_RTX;
3296 tree dest = CALL_EXPR_ARG (exp, 0);
3297 tree src = CALL_EXPR_ARG (exp, 1);
3298 tree len = CALL_EXPR_ARG (exp, 2);
3300 check_memop_sizes (exp, dest, len);
3302 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3305 /* Expand an instrumented call EXP to the memcpy builtin.
3306 Return NULL_RTX if we failed; the caller should emit a normal call,
3307 otherwise try to get the result in TARGET, if convenient (and in
3308 mode MODE if that's convenient). */
3310 static rtx
3311 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3313 if (!validate_arglist (exp,
3314 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3315 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3316 INTEGER_TYPE, VOID_TYPE))
3317 return NULL_RTX;
3318 else
3320 tree dest = CALL_EXPR_ARG (exp, 0);
3321 tree src = CALL_EXPR_ARG (exp, 2);
3322 tree len = CALL_EXPR_ARG (exp, 4);
3323 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3325 /* Return src bounds with the result. */
3326 if (res)
3328 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3329 expand_normal (CALL_EXPR_ARG (exp, 1)));
3330 res = chkp_join_splitted_slot (res, bnd);
3332 return res;
3336 /* Expand a call EXP to the mempcpy builtin.
3337 Return NULL_RTX if we failed; the caller should emit a normal call,
3338 otherwise try to get the result in TARGET, if convenient (and in
3339 mode MODE if that's convenient). If ENDP is 0 return the
3340 destination pointer, if ENDP is 1 return the end pointer ala
3341 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3342 stpcpy. */
3344 static rtx
3345 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3347 if (!validate_arglist (exp,
3348 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3349 return NULL_RTX;
3351 tree dest = CALL_EXPR_ARG (exp, 0);
3352 tree src = CALL_EXPR_ARG (exp, 1);
3353 tree len = CALL_EXPR_ARG (exp, 2);
3355 /* Avoid expanding mempcpy into memcpy when the call is determined
3356 to overflow the buffer. This also prevents the same overflow
3357 from being diagnosed again when expanding memcpy. */
3358 if (!check_memop_sizes (exp, dest, len))
3359 return NULL_RTX;
3361 return expand_builtin_mempcpy_args (dest, src, len,
3362 target, mode, /*endp=*/ 1,
3363 exp);
3366 /* Expand an instrumented call EXP to the mempcpy builtin.
3367 Return NULL_RTX if we failed; the caller should emit a normal call,
3368 otherwise try to get the result in TARGET, if convenient (and in
3369 mode MODE if that's convenient). */
3371 static rtx
3372 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3374 if (!validate_arglist (exp,
3375 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3376 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3377 INTEGER_TYPE, VOID_TYPE))
3378 return NULL_RTX;
3379 else
3381 tree dest = CALL_EXPR_ARG (exp, 0);
3382 tree src = CALL_EXPR_ARG (exp, 2);
3383 tree len = CALL_EXPR_ARG (exp, 4);
3384 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3385 mode, 1, exp);
3387 /* Return src bounds with the result. */
3388 if (res)
3390 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3391 expand_normal (CALL_EXPR_ARG (exp, 1)));
3392 res = chkp_join_splitted_slot (res, bnd);
3394 return res;
3398 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3399 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3400 so that this can also be called without constructing an actual CALL_EXPR.
3401 The other arguments and return value are the same as for
3402 expand_builtin_mempcpy. */
3404 static rtx
3405 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3406 rtx target, machine_mode mode, int endp,
3407 tree orig_exp)
3409 tree fndecl = get_callee_fndecl (orig_exp);
3411 /* If return value is ignored, transform mempcpy into memcpy. */
3412 if (target == const0_rtx
3413 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3414 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3416 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3417 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3418 dest, src, len);
3419 return expand_expr (result, target, mode, EXPAND_NORMAL);
3421 else if (target == const0_rtx
3422 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3424 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3425 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3426 dest, src, len);
3427 return expand_expr (result, target, mode, EXPAND_NORMAL);
3429 else
3431 const char *src_str;
3432 unsigned int src_align = get_pointer_alignment (src);
3433 unsigned int dest_align = get_pointer_alignment (dest);
3434 rtx dest_mem, src_mem, len_rtx;
3436 /* If either SRC or DEST is not a pointer type, don't do this
3437 operation in-line. */
3438 if (dest_align == 0 || src_align == 0)
3439 return NULL_RTX;
3441 /* If LEN is not constant, call the normal function. */
3442 if (! tree_fits_uhwi_p (len))
3443 return NULL_RTX;
3445 len_rtx = expand_normal (len);
3446 src_str = c_getstr (src);
3448 /* If SRC is a string constant and block move would be done
3449 by pieces, we can avoid loading the string from memory
3450 and only store the computed constants. */
3451 if (src_str
3452 && CONST_INT_P (len_rtx)
3453 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3454 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3455 CONST_CAST (char *, src_str),
3456 dest_align, false))
3458 dest_mem = get_memory_rtx (dest, len);
3459 set_mem_align (dest_mem, dest_align);
3460 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3461 builtin_memcpy_read_str,
3462 CONST_CAST (char *, src_str),
3463 dest_align, false, endp);
3464 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3465 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3466 return dest_mem;
3469 if (CONST_INT_P (len_rtx)
3470 && can_move_by_pieces (INTVAL (len_rtx),
3471 MIN (dest_align, src_align)))
3473 dest_mem = get_memory_rtx (dest, len);
3474 set_mem_align (dest_mem, dest_align);
3475 src_mem = get_memory_rtx (src, len);
3476 set_mem_align (src_mem, src_align);
3477 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3478 MIN (dest_align, src_align), endp);
3479 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3480 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3481 return dest_mem;
3484 return NULL_RTX;
3488 /* Expand into a movstr instruction, if one is available. Return NULL_RTX
3489 if we failed; the caller should emit a normal call. Otherwise try to
3490 get the result in TARGET, if convenient. If ENDP is 0 return the
3491 destination pointer, if ENDP is 1 return the end pointer ala
3492 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3493 stpcpy. */
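/* Concretely, when the 4 bytes "abc\0" are copied into BUF, the
   three conventions yield:

     ENDP == 0: BUF         (memcpy-style return value)
     ENDP == 1: BUF + 4     (mempcpy-style)
     ENDP == 2: BUF + 3     (stpcpy-style: the NUL's address)  */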
3495 static rtx
3496 expand_movstr (tree dest, tree src, rtx target, int endp)
3498 struct expand_operand ops[3];
3499 rtx dest_mem;
3500 rtx src_mem;
3502 if (!targetm.have_movstr ())
3503 return NULL_RTX;
3505 dest_mem = get_memory_rtx (dest, NULL);
3506 src_mem = get_memory_rtx (src, NULL);
3507 if (!endp)
3509 target = force_reg (Pmode, XEXP (dest_mem, 0));
3510 dest_mem = replace_equiv_address (dest_mem, target);
3513 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3514 create_fixed_operand (&ops[1], dest_mem);
3515 create_fixed_operand (&ops[2], src_mem);
3516 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3517 return NULL_RTX;
3519 if (endp && target != const0_rtx)
3521 target = ops[0].value;
3522 /* movstr is supposed to set end to the address of the NUL
3523 terminator. If the caller requested a mempcpy-like return value,
3524 adjust it. */
3525 if (endp == 1)
3527 rtx tem = plus_constant (GET_MODE (target),
3528 gen_lowpart (GET_MODE (target), target), 1);
3529 emit_move_insn (target, force_operand (tem, NULL_RTX));
3532 return target;
3535 /* Do some very basic size validation of a call to the strcat builtin
3536 given by EXP. Return NULL_RTX to have the built-in expand to a call
3537 to the library function. */
3539 static rtx
3540 expand_builtin_strcat (tree exp, rtx)
3542 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3543 || !warn_stringop_overflow)
3544 return NULL_RTX;
3546 tree dest = CALL_EXPR_ARG (exp, 0);
3547 tree src = CALL_EXPR_ARG (exp, 1);
3549 /* There is no way here to determine the length of the string in
3550 the destination to which the SRC string is being appended so
3551 just diagnose cases when the source string is longer than
3552 the destination object. */
3554 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3556 check_sizes (OPT_Wstringop_overflow_,
3557 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3559 return NULL_RTX;
3562 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3563 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3564 try to get the result in TARGET, if convenient (and in mode MODE if that's
3565 convenient). */
3567 static rtx
3568 expand_builtin_strcpy (tree exp, rtx target)
3570 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3571 return NULL_RTX;
3573 tree dest = CALL_EXPR_ARG (exp, 0);
3574 tree src = CALL_EXPR_ARG (exp, 1);
3576 if (warn_stringop_overflow)
3578 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3579 check_sizes (OPT_Wstringop_overflow_,
3580 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3583 return expand_builtin_strcpy_args (dest, src, target);
3586 /* Helper function to do the actual work for expand_builtin_strcpy. The
3587 arguments to the builtin_strcpy call DEST and SRC are broken out
3588 so that this can also be called without constructing an actual CALL_EXPR.
3589 The other arguments and return value are the same as for
3590 expand_builtin_strcpy. */
3592 static rtx
3593 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3595 return expand_movstr (dest, src, target, /*endp=*/0);
3598 /* Expand a call EXP to the stpcpy builtin.
3599 Return NULL_RTX if we failed; the caller should emit a normal call;
3600 otherwise try to get the result in TARGET, if convenient (and in
3601 mode MODE if that's convenient). */
3603 static rtx
3604 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3606 tree dst, src;
3607 location_t loc = EXPR_LOCATION (exp);
3609 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3610 return NULL_RTX;
3612 dst = CALL_EXPR_ARG (exp, 0);
3613 src = CALL_EXPR_ARG (exp, 1);
3615 /* If return value is ignored, transform stpcpy into strcpy. */
3616 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3618 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3619 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3620 return expand_expr (result, target, mode, EXPAND_NORMAL);
3622 else
3624 tree len, lenp1;
3625 rtx ret;
3627 /* Ensure we get an actual string whose length can be evaluated at
3628 compile-time, not an expression containing a string. This is
3629 because the latter will potentially produce pessimized code
3630 when used to produce the return value. */
3631 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3632 return expand_movstr (dst, src, target, /*endp=*/2);
3634 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3635 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3636 target, mode, /*endp=*/2,
3637 exp);
3639 if (ret)
3640 return ret;
3642 if (TREE_CODE (len) == INTEGER_CST)
3644 rtx len_rtx = expand_normal (len);
3646 if (CONST_INT_P (len_rtx))
3648 ret = expand_builtin_strcpy_args (dst, src, target);
3650 if (ret)
3652 if (! target)
3654 if (mode != VOIDmode)
3655 target = gen_reg_rtx (mode);
3656 else
3657 target = gen_reg_rtx (GET_MODE (ret));
3659 if (GET_MODE (target) != GET_MODE (ret))
3660 ret = gen_lowpart (GET_MODE (target), ret);
3662 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3663 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3664 gcc_assert (ret);
3666 return target;
3671 return expand_movstr (dst, src, target, /*endp=*/2);
3675 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3676 bytes from constant string DATA + OFFSET and return it as target
3677 constant. */
3679 static rtx
3680 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3681 machine_mode mode)
3683 const char *str = (const char *) data;
3685 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3686 return const0_rtx;
3688 return c_readstr (str + offset, mode);
3691 /* Helper to check the sizes of sequences and the destination of calls
3692 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3693 success (no overflow or invalid sizes), false otherwise. */
3695 static bool
3696 check_strncat_sizes (tree exp, tree objsize)
3698 tree dest = CALL_EXPR_ARG (exp, 0);
3699 tree src = CALL_EXPR_ARG (exp, 1);
3700 tree maxlen = CALL_EXPR_ARG (exp, 2);
3702 /* Try to determine the range of lengths that the source expression
3703 refers to. */
3704 tree lenrange[2];
3705 get_range_strlen (src, lenrange);
3707 /* Try to verify that the destination is big enough for the shortest
3708 string. */
3710 if (!objsize && warn_stringop_overflow)
3712 /* If it hasn't been provided by __strncat_chk, try to determine
3713 the size of the destination object into which the source is
3714 being copied. */
3715 objsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3718 /* Add one for the terminating nul. */
3719 tree srclen = (lenrange[0]
3720 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3721 size_one_node)
3722 : NULL_TREE);
3724 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3725 nul so the specified upper bound should never be equal to (or greater
3726 than) the size of the destination. */
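/* E.g. given 'char d[8];', the call 'strncat (d, s, sizeof d)' is
   diagnosed here: a bound of 8 leaves no room for the terminating
   NUL in the 8-byte destination.  A correct bound would be
   'sizeof d - strlen (d) - 1'.  */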
3727 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3728 && tree_int_cst_equal (objsize, maxlen))
3730 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3731 "specified bound %wu "
3732 "equals the size of the destination",
3733 tree_to_uhwi (maxlen));
3735 return false;
3738 if (!srclen
3739 || (maxlen && tree_fits_uhwi_p (maxlen)
3740 && tree_fits_uhwi_p (srclen)
3741 && tree_int_cst_lt (maxlen, srclen)))
3742 srclen = maxlen;
3744 /* The number of bytes to write is MAXLEN but check_sizes will also
3745 check SRCLEN if MAXLEN's value isn't known. */
3746 return check_sizes (OPT_Wstringop_overflow_,
3747 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3750 /* Similar to expand_builtin_strcat, do some very basic size validation
3751 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3752 the built-in expand to a call to the library function. */
3754 static rtx
3755 expand_builtin_strncat (tree exp, rtx)
3757 if (!validate_arglist (exp,
3758 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3759 || !warn_stringop_overflow)
3760 return NULL_RTX;
3762 tree dest = CALL_EXPR_ARG (exp, 0);
3763 tree src = CALL_EXPR_ARG (exp, 1);
3764 /* The upper bound on the number of bytes to write. */
3765 tree maxlen = CALL_EXPR_ARG (exp, 2);
3766 /* The length of the source sequence. */
3767 tree slen = c_strlen (src, 1);
3769 /* Try to determine the range of lengths that the source expression
3770 refers to. */
3771 tree lenrange[2];
3772 if (slen)
3773 lenrange[0] = lenrange[1] = slen;
3774 else
3775 get_range_strlen (src, lenrange);
3777 /* Try to verify that the destination is big enough for the shortest
3778 string. First try to determine the size of the destination object
3779 into which the source is being copied. */
3780 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3782 /* Add one for the terminating nul. */
3783 tree srclen = (lenrange[0]
3784 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3785 size_one_node)
3786 : NULL_TREE);
3788 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3789 nul so the specified upper bound should never be equal to (or greater
3790 than) the size of the destination. */
3791 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3792 && tree_int_cst_equal (destsize, maxlen))
3794 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3795 "specified bound %wu "
3796 "equals the size of the destination",
3797 tree_to_uhwi (maxlen));
3799 return NULL_RTX;
3802 if (!srclen
3803 || (maxlen && tree_fits_uhwi_p (maxlen)
3804 && tree_fits_uhwi_p (srclen)
3805 && tree_int_cst_lt (maxlen, srclen)))
3806 srclen = maxlen;
3808 /* The number of bytes to write is MAXLEN but check_sizes will also
3809 check SRCLEN if MAXLEN's value isn't known. */
3810 check_sizes (OPT_Wstringop_overflow_,
3811 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3813 return NULL_RTX;
3816 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3817 NULL_RTX if we failed; the caller should emit a normal call. */
3819 static rtx
3820 expand_builtin_strncpy (tree exp, rtx target)
3822 location_t loc = EXPR_LOCATION (exp);
3824 if (validate_arglist (exp,
3825 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3827 tree dest = CALL_EXPR_ARG (exp, 0);
3828 tree src = CALL_EXPR_ARG (exp, 1);
3829 /* The number of bytes to write (not the maximum). */
3830 tree len = CALL_EXPR_ARG (exp, 2);
3831 /* The length of the source sequence. */
3832 tree slen = c_strlen (src, 1);
3834 if (warn_stringop_overflow)
3836 /* Try to determine the range of lengths that the source expression
3837 refers to. */
3838 tree lenrange[2];
3839 if (slen)
3840 lenrange[0] = lenrange[1] = slen;
3841 else
3843 get_range_strlen (src, lenrange);
3844 slen = lenrange[0];
3847 tree destsize = compute_dest_size (dest,
3848 warn_stringop_overflow - 1);
3850 /* The number of bytes to write is LEN but check_sizes will also
3851 check SLEN if LEN's value isn't known. */
3852 check_sizes (OPT_Wstringop_overflow_,
3853 exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3856 /* We must be passed constant LEN and SRC parameters. */
3857 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3858 return NULL_RTX;
3860 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3862 /* We're required to pad with trailing zeros if the requested
3863 len is greater than strlen(s2)+1. In that case try to
3864 use store_by_pieces; if that fails, punt. */
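/* E.g. 'strncpy (buf, "ab", 8)' stores 'a', 'b' and six NUL bytes;
   builtin_strncpy_read_str supplies those trailing zeros by
   returning const0_rtx for offsets past the end of the source.  */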
3865 if (tree_int_cst_lt (slen, len))
3867 unsigned int dest_align = get_pointer_alignment (dest);
3868 const char *p = c_getstr (src);
3869 rtx dest_mem;
3871 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3872 || !can_store_by_pieces (tree_to_uhwi (len),
3873 builtin_strncpy_read_str,
3874 CONST_CAST (char *, p),
3875 dest_align, false))
3876 return NULL_RTX;
3878 dest_mem = get_memory_rtx (dest, len);
3879 store_by_pieces (dest_mem, tree_to_uhwi (len),
3880 builtin_strncpy_read_str,
3881 CONST_CAST (char *, p), dest_align, false, 0);
3882 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3883 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3884 return dest_mem;
3887 return NULL_RTX;
3890 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3891 bytes from constant string DATA + OFFSET and return it as target
3892 constant. */
3894 static rtx
3895 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3896 machine_mode mode)
3898 const char *c = (const char *) data;
3899 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3901 memset (p, *c, GET_MODE_SIZE (mode));
3903 return c_readstr (p, mode);
3906 /* Callback routine for store_by_pieces. Return the RTL of a register
3907 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3908 char value given in the RTL register data. For example, if mode is
3909 4 bytes wide, return the RTL for 0x01010101*data. */
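/* E.g. for a 4-byte MODE the coefficient built below is 0x01010101,
   so a runtime byte value of 0xAB yields
   0xAB * 0x01010101 == 0xABABABAB.  */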
3911 static rtx
3912 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3913 machine_mode mode)
3915 rtx target, coeff;
3916 size_t size;
3917 char *p;
3919 size = GET_MODE_SIZE (mode);
3920 if (size == 1)
3921 return (rtx) data;
3923 p = XALLOCAVEC (char, size);
3924 memset (p, 1, size);
3925 coeff = c_readstr (p, mode);
3927 target = convert_to_mode (mode, (rtx) data, 1);
3928 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3929 return force_reg (mode, target);
3932 /* Expand expression EXP, which is a call to the memset builtin. Return
3933 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3934 try to get the result in TARGET, if convenient (and in mode MODE if that's
3935 convenient). */
3937 static rtx
3938 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3940 if (!validate_arglist (exp,
3941 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3942 return NULL_RTX;
3944 tree dest = CALL_EXPR_ARG (exp, 0);
3945 tree val = CALL_EXPR_ARG (exp, 1);
3946 tree len = CALL_EXPR_ARG (exp, 2);
3948 check_memop_sizes (exp, dest, len);
3950 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3953 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3954 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3955 try to get the result in TARGET, if convenient (and in mode MODE if that's
3956 convenient). */
3958 static rtx
3959 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3961 if (!validate_arglist (exp,
3962 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3963 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3964 return NULL_RTX;
3965 else
3967 tree dest = CALL_EXPR_ARG (exp, 0);
3968 tree val = CALL_EXPR_ARG (exp, 2);
3969 tree len = CALL_EXPR_ARG (exp, 3);
3970 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3972 /* Return the pointer bounds with the result. */
3973 if (res)
3975 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3976 expand_normal (CALL_EXPR_ARG (exp, 1)));
3977 res = chkp_join_splitted_slot (res, bnd);
3979 return res;
3983 /* Helper function to do the actual work for expand_builtin_memset. The
3984 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3985 so that this can also be called without constructing an actual CALL_EXPR.
3986 The other arguments and return value are the same as for
3987 expand_builtin_memset. */
3989 static rtx
3990 expand_builtin_memset_args (tree dest, tree val, tree len,
3991 rtx target, machine_mode mode, tree orig_exp)
3993 tree fndecl, fn;
3994 enum built_in_function fcode;
3995 machine_mode val_mode;
3996 char c;
3997 unsigned int dest_align;
3998 rtx dest_mem, dest_addr, len_rtx;
3999 HOST_WIDE_INT expected_size = -1;
4000 unsigned int expected_align = 0;
4001 unsigned HOST_WIDE_INT min_size;
4002 unsigned HOST_WIDE_INT max_size;
4003 unsigned HOST_WIDE_INT probable_max_size;
4005 dest_align = get_pointer_alignment (dest);
4007 /* If DEST is not a pointer type, don't do this operation in-line. */
4008 if (dest_align == 0)
4009 return NULL_RTX;
4011 if (currently_expanding_gimple_stmt)
4012 stringop_block_profile (currently_expanding_gimple_stmt,
4013 &expected_align, &expected_size);
4015 if (expected_align < dest_align)
4016 expected_align = dest_align;
4018 /* If the LEN parameter is zero, return DEST. */
4019 if (integer_zerop (len))
4021 /* Evaluate and ignore VAL in case it has side-effects. */
4022 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4023 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4026 /* Stabilize the arguments in case we fail. */
4027 dest = builtin_save_expr (dest);
4028 val = builtin_save_expr (val);
4029 len = builtin_save_expr (len);
4031 len_rtx = expand_normal (len);
4032 determine_block_size (len, len_rtx, &min_size, &max_size,
4033 &probable_max_size);
4034 dest_mem = get_memory_rtx (dest, len);
4035 val_mode = TYPE_MODE (unsigned_char_type_node);
4037 if (TREE_CODE (val) != INTEGER_CST)
4039 rtx val_rtx;
4041 val_rtx = expand_normal (val);
4042 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4044 /* Assume that we can memset by pieces if we can store
4045 the coefficients by pieces (in the required modes).
4046 We can't pass builtin_memset_gen_str as that emits RTL. */
4047 c = 1;
4048 if (tree_fits_uhwi_p (len)
4049 && can_store_by_pieces (tree_to_uhwi (len),
4050 builtin_memset_read_str, &c, dest_align,
4051 true))
4053 val_rtx = force_reg (val_mode, val_rtx);
4054 store_by_pieces (dest_mem, tree_to_uhwi (len),
4055 builtin_memset_gen_str, val_rtx, dest_align,
4056 true, 0);
4058 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4059 dest_align, expected_align,
4060 expected_size, min_size, max_size,
4061 probable_max_size))
4062 goto do_libcall;
4064 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4065 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4066 return dest_mem;
4069 if (target_char_cast (val, &c))
4070 goto do_libcall;
4072 if (c)
4074 if (tree_fits_uhwi_p (len)
4075 && can_store_by_pieces (tree_to_uhwi (len),
4076 builtin_memset_read_str, &c, dest_align,
4077 true))
4078 store_by_pieces (dest_mem, tree_to_uhwi (len),
4079 builtin_memset_read_str, &c, dest_align, true, 0);
4080 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4081 gen_int_mode (c, val_mode),
4082 dest_align, expected_align,
4083 expected_size, min_size, max_size,
4084 probable_max_size))
4085 goto do_libcall;
4087 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4088 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4089 return dest_mem;
4092 set_mem_align (dest_mem, dest_align);
4093 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4094 CALL_EXPR_TAILCALL (orig_exp)
4095 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4096 expected_align, expected_size,
4097 min_size, max_size,
4098 probable_max_size);
4100 if (dest_addr == 0)
4102 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4103 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4106 return dest_addr;
4108 do_libcall:
4109 fndecl = get_callee_fndecl (orig_exp);
4110 fcode = DECL_FUNCTION_CODE (fndecl);
4111 if (fcode == BUILT_IN_MEMSET
4112 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4113 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4114 dest, val, len);
4115 else if (fcode == BUILT_IN_BZERO)
4116 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4117 dest, len);
4118 else
4119 gcc_unreachable ();
4120 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4121 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4122 return expand_call (fn, target, target == const0_rtx);
4125 /* Expand expression EXP, which is a call to the bzero builtin. Return
4126 NULL_RTX if we failed; the caller should emit a normal call. */
4128 static rtx
4129 expand_builtin_bzero (tree exp)
4131 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4132 return NULL_RTX;
4134 tree dest = CALL_EXPR_ARG (exp, 0);
4135 tree size = CALL_EXPR_ARG (exp, 1);
4137 check_memop_sizes (exp, dest, size);
4139 /* New argument list transforming bzero(ptr x, int y) to
4140 memset(ptr x, int 0, size_t y). This is done this way
4141 so that if it isn't expanded inline, we fall back to
4142 calling bzero instead of memset. */
4144 location_t loc = EXPR_LOCATION (exp);
4146 return expand_builtin_memset_args (dest, integer_zero_node,
4147 fold_convert_loc (loc,
4148 size_type_node, size),
4149 const0_rtx, VOIDmode, exp);
4152 /* Try to expand cmpstr operation ICODE with the given operands.
4153 Return the result rtx on success, otherwise return null. */
4155 static rtx
4156 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4157 HOST_WIDE_INT align)
4159 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4161 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4162 target = NULL_RTX;
4164 struct expand_operand ops[4];
4165 create_output_operand (&ops[0], target, insn_mode);
4166 create_fixed_operand (&ops[1], arg1_rtx);
4167 create_fixed_operand (&ops[2], arg2_rtx);
4168 create_integer_operand (&ops[3], align);
4169 if (maybe_expand_insn (icode, 4, ops))
4170 return ops[0].value;
4171 return NULL_RTX;
4174 /* Expand expression EXP, which is a call to the memcmp built-in function.
4175 Return NULL_RTX if we failed and the caller should emit a normal call,
4176 otherwise try to get the result in TARGET, if convenient.
4177 RESULT_EQ is true if we can relax the returned value to be either zero
4178 or nonzero, without caring about the sign. */
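/* E.g. a use like 'if (memcmp (a, b, n) == 0)' only tests equality,
   so RESULT_EQ may be true; a use like 'memcmp (a, b, n) < 0'
   depends on the sign of the result, so RESULT_EQ must be false.  */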
4180 static rtx
4181 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4183 if (!validate_arglist (exp,
4184 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4185 return NULL_RTX;
4187 tree arg1 = CALL_EXPR_ARG (exp, 0);
4188 tree arg2 = CALL_EXPR_ARG (exp, 1);
4189 tree len = CALL_EXPR_ARG (exp, 2);
4190 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4191 location_t loc = EXPR_LOCATION (exp);
4193 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4194 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4196 /* If either argument is not a pointer (no known alignment), call the function. */
4197 if (arg1_align == 0 || arg2_align == 0)
4198 return NULL_RTX;
4200 rtx arg1_rtx = get_memory_rtx (arg1, len);
4201 rtx arg2_rtx = get_memory_rtx (arg2, len);
4202 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4204 /* Set MEM_SIZE as appropriate. */
4205 if (CONST_INT_P (len_rtx))
4207 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4208 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4211 by_pieces_constfn constfn = NULL;
4213 const char *src_str = c_getstr (arg2);
4214 if (result_eq && src_str == NULL)
4216 src_str = c_getstr (arg1);
4217 if (src_str != NULL)
4218 std::swap (arg1_rtx, arg2_rtx);
4221 /* If SRC is a string constant and block move would be done
4222 by pieces, we can avoid loading the string from memory
4223 and only store the computed constants. */
4224 if (src_str
4225 && CONST_INT_P (len_rtx)
4226 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4227 constfn = builtin_memcpy_read_str;
4229 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4230 TREE_TYPE (len), target,
4231 result_eq, constfn,
4232 CONST_CAST (char *, src_str));
4234 if (result)
4236 /* Return the value in the proper mode for this function. */
4237 if (GET_MODE (result) == mode)
4238 return result;
4240 if (target != 0)
4242 convert_move (target, result, 0);
4243 return target;
4246 return convert_to_mode (mode, result, 0);
4249 return NULL_RTX;
4252 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4253 if we failed; the caller should emit a normal call. Otherwise try to get
4254 the result in TARGET, if convenient. */
4256 static rtx
4257 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4259 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4260 return NULL_RTX;
4262 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4263 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4264 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4266 rtx arg1_rtx, arg2_rtx;
4267 tree fndecl, fn;
4268 tree arg1 = CALL_EXPR_ARG (exp, 0);
4269 tree arg2 = CALL_EXPR_ARG (exp, 1);
4270 rtx result = NULL_RTX;
4272 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4273 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4276 /* If either argument is not a pointer (no known alignment), call the function. */
4276 if (arg1_align == 0 || arg2_align == 0)
4277 return NULL_RTX;
4279 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4280 arg1 = builtin_save_expr (arg1);
4281 arg2 = builtin_save_expr (arg2);
4283 arg1_rtx = get_memory_rtx (arg1, NULL);
4284 arg2_rtx = get_memory_rtx (arg2, NULL);
4286 /* Try to call cmpstrsi. */
4287 if (cmpstr_icode != CODE_FOR_nothing)
4288 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4289 MIN (arg1_align, arg2_align));
4291 /* Try to determine at least one length and call cmpstrnsi. */
4292 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4294 tree len;
4295 rtx arg3_rtx;
4297 tree len1 = c_strlen (arg1, 1);
4298 tree len2 = c_strlen (arg2, 1);
4300 if (len1)
4301 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4302 if (len2)
4303 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4305 /* If we don't have a constant length for the first, use the length
4306 of the second, if we know it. We don't require a constant for
4307 this case; some cost analysis could be done if both are available
4308 but neither is constant. For now, assume they're equally cheap,
4309 unless one has side effects. If both strings have constant lengths,
4310 use the smaller. */
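/* E.g. if ARG1 is the literal "abc", LEN1 is 4 (strlen plus the
   NUL) and is used when LEN2 is unknown; when both are constants
   the smaller of the two is used.  */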
4312 if (!len1)
4313 len = len2;
4314 else if (!len2)
4315 len = len1;
4316 else if (TREE_SIDE_EFFECTS (len1))
4317 len = len2;
4318 else if (TREE_SIDE_EFFECTS (len2))
4319 len = len1;
4320 else if (TREE_CODE (len1) != INTEGER_CST)
4321 len = len2;
4322 else if (TREE_CODE (len2) != INTEGER_CST)
4323 len = len1;
4324 else if (tree_int_cst_lt (len1, len2))
4325 len = len1;
4326 else
4327 len = len2;
4329 /* If both arguments have side effects, we cannot optimize. */
4330 if (len && !TREE_SIDE_EFFECTS (len))
4332 arg3_rtx = expand_normal (len);
4333 result = expand_cmpstrn_or_cmpmem
4334 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4335 arg3_rtx, MIN (arg1_align, arg2_align));
4339 if (result)
4341 /* Return the value in the proper mode for this function. */
4342 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4343 if (GET_MODE (result) == mode)
4344 return result;
4345 if (target == 0)
4346 return convert_to_mode (mode, result, 0);
4347 convert_move (target, result, 0);
4348 return target;
4351 /* Expand the library call ourselves using a stabilized argument
4352 list to avoid evaluating the function's arguments twice. */
4353 fndecl = get_callee_fndecl (exp);
4354 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4355 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4356 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4357 return expand_call (fn, target, target == const0_rtx);
4359 return NULL_RTX;
4362 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4363 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4364 the result in TARGET, if convenient. */
4366 static rtx
4367 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4368 ATTRIBUTE_UNUSED machine_mode mode)
4370 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4372 if (!validate_arglist (exp,
4373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4374 return NULL_RTX;
4376 /* If c_strlen can determine an expression for one of the string
4377 lengths, and it doesn't have side effects, then emit cmpstrnsi
4378 using length MIN(strlen(string)+1, arg3). */
4379 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4380 if (cmpstrn_icode != CODE_FOR_nothing)
4382 tree len, len1, len2, len3;
4383 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4384 rtx result;
4385 tree fndecl, fn;
4386 tree arg1 = CALL_EXPR_ARG (exp, 0);
4387 tree arg2 = CALL_EXPR_ARG (exp, 1);
4388 tree arg3 = CALL_EXPR_ARG (exp, 2);
4390 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4391 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4393 len1 = c_strlen (arg1, 1);
4394 len2 = c_strlen (arg2, 1);
4396 if (len1)
4397 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4398 if (len2)
4399 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4401 len3 = fold_convert_loc (loc, sizetype, arg3);
4403 /* If we don't have a constant length for the first, use the length
4404 of the second, if we know it. If neither string is constant length,
4405 use the given length argument. We don't require a constant for
4406 this case; some cost analysis could be done if both are available
4407 but neither is constant. For now, assume they're equally cheap,
4408 unless one has side effects. If both strings have constant lengths,
4409 use the smaller. */
4411 if (!len1 && !len2)
4412 len = len3;
4413 else if (!len1)
4414 len = len2;
4415 else if (!len2)
4416 len = len1;
4417 else if (TREE_SIDE_EFFECTS (len1))
4418 len = len2;
4419 else if (TREE_SIDE_EFFECTS (len2))
4420 len = len1;
4421 else if (TREE_CODE (len1) != INTEGER_CST)
4422 len = len2;
4423 else if (TREE_CODE (len2) != INTEGER_CST)
4424 len = len1;
4425 else if (tree_int_cst_lt (len1, len2))
4426 len = len1;
4427 else
4428 len = len2;
4430 /* If we are not using the given length, we must incorporate it here.
4431 The actual new length parameter will be MIN(len,arg3) in this case. */
4432 if (len != len3)
4433 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4434 arg1_rtx = get_memory_rtx (arg1, len);
4435 arg2_rtx = get_memory_rtx (arg2, len);
4436 arg3_rtx = expand_normal (len);
4437 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4438 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4439 MIN (arg1_align, arg2_align));
4440 if (result)
4442 /* Return the value in the proper mode for this function. */
4443 mode = TYPE_MODE (TREE_TYPE (exp));
4444 if (GET_MODE (result) == mode)
4445 return result;
4446 if (target == 0)
4447 return convert_to_mode (mode, result, 0);
4448 convert_move (target, result, 0);
4449 return target;
4452 /* Expand the library call ourselves using a stabilized argument
4453 list to avoid evaluating the function's arguments twice. */
4454 fndecl = get_callee_fndecl (exp);
4455 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4456 arg1, arg2, len);
4457 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4458 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4459 return expand_call (fn, target, target == const0_rtx);
4461 return NULL_RTX;
4464 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4465 if that's convenient. */
4467 rtx
4468 expand_builtin_saveregs (void)
4470 rtx val;
4471 rtx_insn *seq;
4473 /* Don't do __builtin_saveregs more than once in a function.
4474 Save the result of the first call and reuse it. */
4475 if (saveregs_value != 0)
4476 return saveregs_value;
4478 /* When this function is called, it means that registers must be
4479 saved on entry to this function. So we migrate the call to the
4480 first insn of this function. */
4482 start_sequence ();
4484 /* Do whatever the machine needs done in this case. */
4485 val = targetm.calls.expand_builtin_saveregs ();
4487 seq = get_insns ();
4488 end_sequence ();
4490 saveregs_value = val;
4492 /* Put the insns after the NOTE that starts the function. If this
4493 is inside a start_sequence, make the outer-level insn chain current, so
4494 the code is placed at the start of the function. */
4495 push_topmost_sequence ();
4496 emit_insn_after (seq, entry_of_function ());
4497 pop_topmost_sequence ();
4499 return val;
4502 /* Expand a call to __builtin_next_arg. */
4504 static rtx
4505 expand_builtin_next_arg (void)
4507 /* Checking arguments is already done in fold_builtin_next_arg,
4508 which must be called before this function. */
4509 return expand_binop (ptr_mode, add_optab,
4510 crtl->args.internal_arg_pointer,
4511 crtl->args.arg_offset_rtx,
4512 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4515 /* Make it easier for the backends by protecting the valist argument
4516 from multiple evaluations. */
4518 static tree
4519 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4521 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4523 /* The current way of determining the type of valist is completely
4524 bogus. We should have the information on the va builtin instead. */
4525 if (!vatype)
4526 vatype = targetm.fn_abi_va_list (cfun->decl);
4528 if (TREE_CODE (vatype) == ARRAY_TYPE)
4530 if (TREE_SIDE_EFFECTS (valist))
4531 valist = save_expr (valist);
4533 /* For this case, the backends will be expecting a pointer to
4534 vatype, but it's possible we've actually been given an array
4535 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4536 So fix it. */
4537 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4539 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4540 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4543 else
4545 tree pt = build_pointer_type (vatype);
4547 if (! needs_lvalue)
4549 if (! TREE_SIDE_EFFECTS (valist))
4550 return valist;
4552 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4553 TREE_SIDE_EFFECTS (valist) = 1;
4556 if (TREE_SIDE_EFFECTS (valist))
4557 valist = save_expr (valist);
4558 valist = fold_build2_loc (loc, MEM_REF,
4559 vatype, valist, build_int_cst (pt, 0));
4562 return valist;
4565 /* The "standard" definition of va_list is void*. */
4567 tree
4568 std_build_builtin_va_list (void)
4570 return ptr_type_node;
4573 /* The "standard" abi va_list is va_list_type_node. */
4575 tree
4576 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4578 return va_list_type_node;
4581 /* The "standard" type of va_list is va_list_type_node. */
4583 tree
4584 std_canonical_va_list_type (tree type)
4586 tree wtype, htype;
4588 wtype = va_list_type_node;
4589 htype = type;
4591 if (TREE_CODE (wtype) == ARRAY_TYPE)
4593 /* If va_list is an array type, the argument may have decayed
4594 to a pointer type, e.g. by being passed to another function.
4595 In that case, unwrap both types so that we can compare the
4596 underlying records. */
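/* E.g. on ABIs that declare va_list as 'struct tag[1]' (the tag
   name is ABI-specific), a va_list passed to another function
   arrives as 'struct tag *'; both sides unwrap to 'struct tag'
   and compare equal below.  */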
4597 if (TREE_CODE (htype) == ARRAY_TYPE
4598 || POINTER_TYPE_P (htype))
4600 wtype = TREE_TYPE (wtype);
4601 htype = TREE_TYPE (htype);
4604 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4605 return va_list_type_node;
4607 return NULL_TREE;
4610 /* The "standard" implementation of va_start: just assign `nextarg' to
4611 the variable. */
4613 void
4614 std_expand_builtin_va_start (tree valist, rtx nextarg)
4616 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4617 convert_move (va_r, nextarg, 0);
4619 /* We do not have any valid bounds for the pointer, so
4620 just store zero bounds for it. */
4621 if (chkp_function_instrumented_p (current_function_decl))
4622 chkp_expand_bounds_reset_for_mem (valist,
4623 make_tree (TREE_TYPE (valist),
4624 nextarg));
4627 /* Expand EXP, a call to __builtin_va_start. */
4629 static rtx
4630 expand_builtin_va_start (tree exp)
4632 rtx nextarg;
4633 tree valist;
4634 location_t loc = EXPR_LOCATION (exp);
4636 if (call_expr_nargs (exp) < 2)
4638 error_at (loc, "too few arguments to function %<va_start%>");
4639 return const0_rtx;
4642 if (fold_builtin_next_arg (exp, true))
4643 return const0_rtx;
4645 nextarg = expand_builtin_next_arg ();
4646 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4648 if (targetm.expand_builtin_va_start)
4649 targetm.expand_builtin_va_start (valist, nextarg);
4650 else
4651 std_expand_builtin_va_start (valist, nextarg);
4653 return const0_rtx;
4656 /* Expand EXP, a call to __builtin_va_end. */
4658 static rtx
4659 expand_builtin_va_end (tree exp)
4661 tree valist = CALL_EXPR_ARG (exp, 0);
4663 /* Evaluate for side effects, if needed. I hate macros that don't
4664 do that. */
4665 if (TREE_SIDE_EFFECTS (valist))
4666 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4668 return const0_rtx;
4671 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4672 builtin rather than just as an assignment in stdarg.h because of the
4673 nastiness of array-type va_list types. */
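/* E.g. for an array-type va_list, 'va_copy (d, s)' must block-copy
   the underlying record (the emit_block_move path below); the
   simple MODIFY_EXPR path is used only when the ABI va_list is
   not an array type.  */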
4675 static rtx
4676 expand_builtin_va_copy (tree exp)
4678 tree dst, src, t;
4679 location_t loc = EXPR_LOCATION (exp);
4681 dst = CALL_EXPR_ARG (exp, 0);
4682 src = CALL_EXPR_ARG (exp, 1);
4684 dst = stabilize_va_list_loc (loc, dst, 1);
4685 src = stabilize_va_list_loc (loc, src, 0);
4687 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4689 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4691 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4692 TREE_SIDE_EFFECTS (t) = 1;
4693 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4695 else
4697 rtx dstb, srcb, size;
4699 /* Evaluate to pointers. */
4700 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4701 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4702 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4703 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4705 dstb = convert_memory_address (Pmode, dstb);
4706 srcb = convert_memory_address (Pmode, srcb);
4708 /* "Dereference" to BLKmode memories. */
4709 dstb = gen_rtx_MEM (BLKmode, dstb);
4710 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4711 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4712 srcb = gen_rtx_MEM (BLKmode, srcb);
4713 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4714 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4716 /* Copy. */
4717 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4720 return const0_rtx;
4723 /* Expand a call to one of the builtin functions __builtin_frame_address or
4724 __builtin_return_address. */
4726 static rtx
4727 expand_builtin_frame_address (tree fndecl, tree exp)
4729 /* The argument must be a nonnegative integer constant.
4730 It counts the number of frames to scan up the stack.
4731 The value is either the frame pointer value or the return
4732 address saved in that frame. */
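/* E.g. '__builtin_return_address (0)' yields the current function's
   return address, and '__builtin_frame_address (1)' the caller's
   frame address, on targets that can walk that far up the stack.  */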
4733 if (call_expr_nargs (exp) == 0)
4734 /* Warning about missing arg was already issued. */
4735 return const0_rtx;
4736 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4738 error ("invalid argument to %qD", fndecl);
4739 return const0_rtx;
4741 else
4743 /* Number of frames to scan up the stack. */
4744 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4746 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4748 /* Some ports cannot access arbitrary stack frames. */
4749 if (tem == NULL)
4751 warning (0, "unsupported argument to %qD", fndecl);
4752 return const0_rtx;
4755 if (count)
4757 /* Warn since no effort is made to ensure that any frame
4758 beyond the current one exists or can be safely reached. */
4759 warning (OPT_Wframe_address, "calling %qD with "
4760 "a nonzero argument is unsafe", fndecl);
4763 /* For __builtin_frame_address, return what we've got. */
4764 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4765 return tem;
4767 if (!REG_P (tem)
4768 && ! CONSTANT_P (tem))
4769 tem = copy_addr_to_reg (tem);
4770 return tem;
4774 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4775 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4776 is the same as for allocate_dynamic_stack_space. */
4778 static rtx
4779 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4781 rtx op0;
4782 rtx result;
4783 unsigned int align;
4784 tree fndecl = get_callee_fndecl (exp);
4785 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4786 == BUILT_IN_ALLOCA_WITH_ALIGN);
4788 bool valid_arglist
4789 = (alloca_with_align
4790 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4791 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4793 if (!valid_arglist)
4794 return NULL_RTX;
4796 if ((alloca_with_align && !warn_vla_limit)
4797 || (!alloca_with_align && !warn_alloca_limit))
4799 /* -Walloca-larger-than and -Wvla-larger-than settings override
4800 the more general -Walloc-size-larger-than so unless either of
4801 the former options is specified, check the alloca arguments for
4802 overflow. */
4803 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4804 int idx[] = { 0, -1 };
4805 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4808 /* Compute the argument. */
4809 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4811 /* Compute the alignment. */
4812 align = (alloca_with_align
4813 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4814 : BIGGEST_ALIGNMENT);
4816 /* Allocate the desired space. */
4817 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4818 result = convert_memory_address (ptr_mode, result);
4820 return result;
4823 /* Expand a call to bswap builtin in EXP.
4824 Return NULL_RTX if a normal call should be emitted rather than expanding the
4825 function in-line. If convenient, the result should be placed in TARGET.
4826 SUBTARGET may be used as the target for computing one of EXP's operands. */
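/* E.g. '__builtin_bswap32 (0x12345678)' evaluates to 0x78563412.  */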
4828 static rtx
4829 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4830 rtx subtarget)
4832 tree arg;
4833 rtx op0;
4835 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4836 return NULL_RTX;
4838 arg = CALL_EXPR_ARG (exp, 0);
4839 op0 = expand_expr (arg,
4840 subtarget && GET_MODE (subtarget) == target_mode
4841 ? subtarget : NULL_RTX,
4842 target_mode, EXPAND_NORMAL);
4843 if (GET_MODE (op0) != target_mode)
4844 op0 = convert_to_mode (target_mode, op0, 1);
4846 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4848 gcc_assert (target);
4850 return convert_to_mode (target_mode, target, 1);
4853 /* Expand a call to a unary builtin in EXP.
4854 Return NULL_RTX if a normal call should be emitted rather than expanding the
4855 function in-line. If convenient, the result should be placed in TARGET.
4856 SUBTARGET may be used as the target for computing one of EXP's operands. */
4858 static rtx
4859 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4860 rtx subtarget, optab op_optab)
4862 rtx op0;
4864 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4865 return NULL_RTX;
4867 /* Compute the argument. */
4868 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4869 (subtarget
4870 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4871 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4872 VOIDmode, EXPAND_NORMAL);
4873 /* Compute op, into TARGET if possible.
4874 Set TARGET to wherever the result comes back. */
4875 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4876 op_optab, op0, target, op_optab != clrsb_optab);
4877 gcc_assert (target);
4879 return convert_to_mode (target_mode, target, 0);
4882 /* Expand a call to __builtin_expect. We just return our argument
4883 as the builtin_expect semantics should have already been handled
4884 by the tree branch prediction pass. */
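/* E.g. in 'if (__builtin_expect (err, 0))' the "unlikely" hint was
   consumed earlier by the predictors; here the call simply expands
   to ERR.  */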
4886 static rtx
4887 expand_builtin_expect (tree exp, rtx target)
4889 tree arg;
4891 if (call_expr_nargs (exp) < 2)
4892 return const0_rtx;
4893 arg = CALL_EXPR_ARG (exp, 0);
4895 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4896 /* When guessing was done, the hints should be already stripped away. */
4897 gcc_assert (!flag_guess_branch_prob
4898 || optimize == 0 || seen_error ());
4899 return target;
4902 /* Expand a call to __builtin_assume_aligned. We just return our first
4903 argument as the builtin_assume_aligned semantics should have already
4904 been handled by CCP. */
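/* E.g. after 'p = __builtin_assume_aligned (p, 16);' CCP has
   already recorded the 16-byte alignment of P, so only the first
   argument still needs to be expanded.  */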
4906 static rtx
4907 expand_builtin_assume_aligned (tree exp, rtx target)
4909 if (call_expr_nargs (exp) < 2)
4910 return const0_rtx;
4911 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4912 EXPAND_NORMAL);
4913 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4914 && (call_expr_nargs (exp) < 3
4915 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4916 return target;
4919 void
4920 expand_builtin_trap (void)
4922 if (targetm.have_trap ())
4924 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4925 /* For trap insns when not accumulating outgoing args force
4926 REG_ARGS_SIZE note to prevent crossjumping of calls with
4927 different args sizes. */
4928 if (!ACCUMULATE_OUTGOING_ARGS)
4929 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4931 else
4933 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4934 tree call_expr = build_call_expr (fn, 0);
4935 expand_call (call_expr, NULL_RTX, false);
4938 emit_barrier ();
4941 /* Expand a call to __builtin_unreachable. We do nothing except emit
4942 a barrier saying that control flow will not pass here.
4944 It is the responsibility of the program being compiled to ensure
4945 that control flow never reaches __builtin_unreachable. */
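/* E.g. placed after a switch whose cases are exhaustive,
   __builtin_unreachable () lets the optimizers discard the
   fall-through path; all that is emitted for it is the barrier.  */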
4946 static void
4947 expand_builtin_unreachable (void)
4949 emit_barrier ();
4952 /* Expand EXP, a call to fabs, fabsf or fabsl.
4953 Return NULL_RTX if a normal call should be emitted rather than expanding
4954 the function inline. If convenient, the result should be placed
4955 in TARGET. SUBTARGET may be used as the target for computing
4956 the operand. */
4958 static rtx
4959 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4961 machine_mode mode;
4962 tree arg;
4963 rtx op0;
4965 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4966 return NULL_RTX;
4968 arg = CALL_EXPR_ARG (exp, 0);
4969 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4970 mode = TYPE_MODE (TREE_TYPE (arg));
4971 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4972 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4975 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4976 Return NULL_RTX if a normal call should be emitted rather than expanding the
4977 function inline. If convenient, the result should be placed in TARGET.
4978 SUBTARGET may be used as the target for computing the operand. */
4980 static rtx
4981 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4983 rtx op0, op1;
4984 tree arg;
4986 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4987 return NULL_RTX;
4989 arg = CALL_EXPR_ARG (exp, 0);
4990 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4992 arg = CALL_EXPR_ARG (exp, 1);
4993 op1 = expand_normal (arg);
4995 return expand_copysign (op0, op1, target);
4998 /* Expand a call to __builtin___clear_cache. */
5000 static rtx
5001 expand_builtin___clear_cache (tree exp)
5003 if (!targetm.code_for_clear_cache)
5005 #ifdef CLEAR_INSN_CACHE
5006 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5007 does something. Just do the default expansion to a call to
5008 __clear_cache(). */
5009 return NULL_RTX;
5010 #else
5011 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5012 does nothing. There is no need to call it. Do nothing. */
5013 return const0_rtx;
5014 #endif /* CLEAR_INSN_CACHE */
5017 /* We have a "clear_cache" insn, and it will handle everything. */
5018 tree begin, end;
5019 rtx begin_rtx, end_rtx;
5021 /* We must not expand to a library call. If we did, any
5022 fallback library function in libgcc that might contain a call to
5023 __builtin___clear_cache() would recurse infinitely. */
5024 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5026 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5027 return const0_rtx;
5030 if (targetm.have_clear_cache ())
5032 struct expand_operand ops[2];
5034 begin = CALL_EXPR_ARG (exp, 0);
5035 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5037 end = CALL_EXPR_ARG (exp, 1);
5038 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5040 create_address_operand (&ops[0], begin_rtx);
5041 create_address_operand (&ops[1], end_rtx);
5042 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5043 return const0_rtx;
5045 return const0_rtx;
5048 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5050 static rtx
5051 round_trampoline_addr (rtx tramp)
5053 rtx temp, addend, mask;
5055 /* If we don't need too much alignment, we'll have been guaranteed
5056 proper alignment by get_trampoline_type. */
5057 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5058 return tramp;
5060 /* Round address up to desired boundary. */
5061 temp = gen_reg_rtx (Pmode);
5062 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5063 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5065 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5066 temp, 0, OPTAB_LIB_WIDEN);
5067 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5068 temp, 0, OPTAB_LIB_WIDEN);
5070 return tramp;
5073 static rtx
5074 expand_builtin_init_trampoline (tree exp, bool onstack)
5076 tree t_tramp, t_func, t_chain;
5077 rtx m_tramp, r_tramp, r_chain, tmp;
5079 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5080 POINTER_TYPE, VOID_TYPE))
5081 return NULL_RTX;
5083 t_tramp = CALL_EXPR_ARG (exp, 0);
5084 t_func = CALL_EXPR_ARG (exp, 1);
5085 t_chain = CALL_EXPR_ARG (exp, 2);
5087 r_tramp = expand_normal (t_tramp);
5088 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5089 MEM_NOTRAP_P (m_tramp) = 1;
5091 /* If ONSTACK, the TRAMP argument should be the address of a field
5092 within the local function's FRAME decl. Either way, let's see if
5093 we can fill in the MEM_ATTRs for this memory. */
5094 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5095 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5097 /* Creator of a heap trampoline is responsible for making sure the
5098 address is aligned to at least STACK_BOUNDARY. Normally malloc
5099 will ensure this anyhow. */
5100 tmp = round_trampoline_addr (r_tramp);
5101 if (tmp != r_tramp)
5103 m_tramp = change_address (m_tramp, BLKmode, tmp);
5104 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5105 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5108 /* The FUNC argument should be the address of the nested function.
5109 Extract the actual function decl to pass to the hook. */
5110 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5111 t_func = TREE_OPERAND (t_func, 0);
5112 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5114 r_chain = expand_normal (t_chain);
5116 /* Generate insns to initialize the trampoline. */
5117 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5119 if (onstack)
5121 trampolines_created = 1;
5123 if (targetm.calls.custom_function_descriptors != 0)
5124 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5125 "trampoline generated for nested function %qD", t_func);
5128 return const0_rtx;
5131 static rtx
5132 expand_builtin_adjust_trampoline (tree exp)
5134 rtx tramp;
5136 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5137 return NULL_RTX;
5139 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5140 tramp = round_trampoline_addr (tramp);
5141 if (targetm.calls.trampoline_adjust_address)
5142 tramp = targetm.calls.trampoline_adjust_address (tramp);
5144 return tramp;
5147 /* Expand a call to the builtin descriptor initialization routine.
5148 A descriptor is made up of a pair of pointers: one to the static
5149 chain and one to the code entry, in that order. */
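/* Laid out as a C object it would look like (an illustrative
   sketch, not a type GCC declares):

     struct descriptor { void *static_chain; void *entry_point; };

   matching the two emit_move_insn calls below.  */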
5151 static rtx
5152 expand_builtin_init_descriptor (tree exp)
5154 tree t_descr, t_func, t_chain;
5155 rtx m_descr, r_descr, r_func, r_chain;
5157 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5158 VOID_TYPE))
5159 return NULL_RTX;
5161 t_descr = CALL_EXPR_ARG (exp, 0);
5162 t_func = CALL_EXPR_ARG (exp, 1);
5163 t_chain = CALL_EXPR_ARG (exp, 2);
5165 r_descr = expand_normal (t_descr);
5166 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5167 MEM_NOTRAP_P (m_descr) = 1;
5169 r_func = expand_normal (t_func);
5170 r_chain = expand_normal (t_chain);
5172 /* Generate insns to initialize the descriptor. */
5173 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5174 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5175 POINTER_SIZE / BITS_PER_UNIT), r_func);
5177 return const0_rtx;
5180 /* Expand a call to the builtin descriptor adjustment routine. */
5182 static rtx
5183 expand_builtin_adjust_descriptor (tree exp)
5185 rtx tramp;
5187 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5188 return NULL_RTX;
5190 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5192 /* Unalign the descriptor to allow runtime identification. */
5193 tramp = plus_constant (ptr_mode, tramp,
5194 targetm.calls.custom_function_descriptors);
5196 return force_operand (tramp, NULL_RTX);
5199 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5200 function. The function first checks whether the back end provides
5201 an insn to implement signbit for the respective mode. If not, it
5202 checks whether the floating point format of the value is such that
5203 the sign bit can be extracted. If that is not the case, error out.
5204 EXP is the expression that is a call to the builtin function; if
5205 convenient, the result should be placed in TARGET. */
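/* E.g. 'signbit (-0.0)' is nonzero in IEEE formats even though
   -0.0 == 0.0 compares true, which is why the bit is extracted
   directly rather than compared against zero whenever the format
   has signed zeros.  */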
5206 static rtx
5207 expand_builtin_signbit (tree exp, rtx target)
5209 const struct real_format *fmt;
5210 machine_mode fmode, imode, rmode;
5211 tree arg;
5212 int word, bitpos;
5213 enum insn_code icode;
5214 rtx temp;
5215 location_t loc = EXPR_LOCATION (exp);
5217 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5218 return NULL_RTX;
5220 arg = CALL_EXPR_ARG (exp, 0);
5221 fmode = TYPE_MODE (TREE_TYPE (arg));
5222 rmode = TYPE_MODE (TREE_TYPE (exp));
5223 fmt = REAL_MODE_FORMAT (fmode);
5225 arg = builtin_save_expr (arg);
5227 /* Expand the argument yielding an RTX expression. */
5228 temp = expand_normal (arg);
5230 /* Check if the back end provides an insn that handles signbit for the
5231 argument's mode. */
5232 icode = optab_handler (signbit_optab, fmode);
5233 if (icode != CODE_FOR_nothing)
5235 rtx_insn *last = get_last_insn ();
5236 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5237 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5238 return target;
5239 delete_insns_since (last);
5242 /* For floating point formats without a sign bit, implement signbit
5243 as "ARG < 0.0". */
5244 bitpos = fmt->signbit_ro;
5245 if (bitpos < 0)
5247 /* But we can't do this if the format supports signed zero. */
5248 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5250 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5251 build_real (TREE_TYPE (arg), dconst0));
5252 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5255 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5257 imode = int_mode_for_mode (fmode);
5258 gcc_assert (imode != BLKmode);
5259 temp = gen_lowpart (imode, temp);
5261 else
5263 imode = word_mode;
5264 /* Handle targets with different FP word orders. */
5265 if (FLOAT_WORDS_BIG_ENDIAN)
5266 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5267 else
5268 word = bitpos / BITS_PER_WORD;
5269 temp = operand_subword_force (temp, word, fmode);
5270 bitpos = bitpos % BITS_PER_WORD;
5273 /* Force the intermediate word_mode (or narrower) result into a
5274 register. This avoids attempting to create paradoxical SUBREGs
5275 of floating point modes below. */
5276 temp = force_reg (imode, temp);
5278 /* If the bitpos is within the "result mode" lowpart, the operation
5279 can be implemented with a single bitwise AND. Otherwise, we need
5280 a right shift and an AND. */
5282 if (bitpos < GET_MODE_BITSIZE (rmode))
5284 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5286 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5287 temp = gen_lowpart (rmode, temp);
5288 temp = expand_binop (rmode, and_optab, temp,
5289 immed_wide_int_const (mask, rmode),
5290 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5292 else
5294 /* Perform a logical right shift to place the signbit in the least
5295 significant bit, then truncate the result to the desired mode
5296 and mask just this bit. */
5297 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5298 temp = gen_lowpart (rmode, temp);
5299 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5300 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5303 return temp;
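/* Editorial illustration (not part of GCC): when no signbit insn exists
   and the format has a sign bit, the fallback above is equivalent, for
   IEEE double on a 64-bit target (bitpos == 63), to:

     int signbit_model (double x)          /* hypothetical user-level model */
     {
       unsigned long long bits;
       __builtin_memcpy (&bits, &x, sizeof bits);
       return (int) ((bits >> 63) & 1);    /* shift into lowpart, mask bit 0 */
     }

   When bitpos already lies within the result mode's lowpart, the shift is
   dropped and a single AND with (1 << bitpos) suffices, since callers only
   test the result for zero/nonzero.  */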
5306 /* Expand fork or exec calls. TARGET is the desired target of the
5307 call. EXP is the call. FN is the
5308 identifier of the actual function. IGNORE is nonzero if the
5309 value is to be ignored. */
5311 static rtx
5312 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5314 tree id, decl;
5315 tree call;
5317 /* If we are not profiling, just call the function. */
5318 if (!profile_arc_flag)
5319 return NULL_RTX;
5321 /* Otherwise call the wrapper. This should be equivalent for the rest of
5322 the compiler, so the code does not diverge, and the wrapper may run the
5323 code necessary for keeping the profiling sane. */
5325 switch (DECL_FUNCTION_CODE (fn))
5327 case BUILT_IN_FORK:
5328 id = get_identifier ("__gcov_fork");
5329 break;
5331 case BUILT_IN_EXECL:
5332 id = get_identifier ("__gcov_execl");
5333 break;
5335 case BUILT_IN_EXECV:
5336 id = get_identifier ("__gcov_execv");
5337 break;
5339 case BUILT_IN_EXECLP:
5340 id = get_identifier ("__gcov_execlp");
5341 break;
5343 case BUILT_IN_EXECLE:
5344 id = get_identifier ("__gcov_execle");
5345 break;
5347 case BUILT_IN_EXECVP:
5348 id = get_identifier ("__gcov_execvp");
5349 break;
5351 case BUILT_IN_EXECVE:
5352 id = get_identifier ("__gcov_execve");
5353 break;
5355 default:
5356 gcc_unreachable ();
5359 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5360 FUNCTION_DECL, id, TREE_TYPE (fn));
5361 DECL_EXTERNAL (decl) = 1;
5362 TREE_PUBLIC (decl) = 1;
5363 DECL_ARTIFICIAL (decl) = 1;
5364 TREE_NOTHROW (decl) = 1;
5365 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5366 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5367 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5368 return expand_call (call, target, ignore);
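/* Editorial illustration (not part of GCC sources): under -fprofile-arcs
   the rewriting above turns

     pid_t pid = fork ();

   into the equivalent of

     pid_t pid = __gcov_fork ();

   where the libgcov wrapper flushes the profiling counters before
   delegating to the real fork/exec, keeping parent and child counts sane
   across the process boundary.  */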
5373 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5374 the pointer in these functions is void*, the tree optimizers may remove
5375 casts. The mode computed in expand_builtin isn't reliable either, due
5376 to __sync_bool_compare_and_swap.
5378 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5379 group of builtins. This gives us log2 of the mode size. */
5381 static inline machine_mode
5382 get_builtin_sync_mode (int fcode_diff)
5384 /* The size is not negotiable, so ask not to get BLKmode in return
5385 if the target indicates that a smaller size would be better. */
5386 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
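/* Editorial illustration: FCODE_DIFF is log2 of the access size in bytes
   because the _1/_2/_4/_8/_16 variants are enumerated consecutively, so on
   a target with 8-bit units the mapping above is:

     fcode_diff 0 -> 1 byte   (QImode)
     fcode_diff 1 -> 2 bytes  (HImode)
     fcode_diff 2 -> 4 bytes  (SImode)
     fcode_diff 3 -> 8 bytes  (DImode)
     fcode_diff 4 -> 16 bytes (TImode)

   e.g. BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2.  */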
5389 /* Expand the memory expression LOC and return the appropriate memory operand
5390 for the builtin_sync operations. */
5392 static rtx
5393 get_builtin_sync_mem (tree loc, machine_mode mode)
5395 rtx addr, mem;
5397 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5398 addr = convert_memory_address (Pmode, addr);
5400 /* Note that we explicitly do not want any alias information for this
5401 memory, so that we kill all other live memories. Otherwise we don't
5402 satisfy the full barrier semantics of the intrinsic. */
5403 mem = validize_mem (gen_rtx_MEM (mode, addr));
5405 /* The alignment needs to be at least that of the mode. */
5406 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5407 get_pointer_alignment (loc)));
5408 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5409 MEM_VOLATILE_P (mem) = 1;
5411 return mem;
5414 /* Make sure an argument is in the right mode.
5415 EXP is the tree argument.
5416 MODE is the mode it should be in. */
5418 static rtx
5419 expand_expr_force_mode (tree exp, machine_mode mode)
5421 rtx val;
5422 machine_mode old_mode;
5424 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5425 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5426 of CONST_INTs, where we know the old_mode only from the call argument. */
5428 old_mode = GET_MODE (val);
5429 if (old_mode == VOIDmode)
5430 old_mode = TYPE_MODE (TREE_TYPE (exp));
5431 val = convert_modes (mode, old_mode, val, 1);
5432 return val;
5436 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5437 EXP is the CALL_EXPR. CODE is the rtx code
5438 that corresponds to the arithmetic or logical operation from the name;
5439 an exception here is that NOT actually means NAND. TARGET is an optional
5440 place for us to store the results; AFTER is true if this is the
5441 fetch_and_xxx form. */
5443 static rtx
5444 expand_builtin_sync_operation (machine_mode mode, tree exp,
5445 enum rtx_code code, bool after,
5446 rtx target)
5448 rtx val, mem;
5449 location_t loc = EXPR_LOCATION (exp);
5451 if (code == NOT && warn_sync_nand)
5453 tree fndecl = get_callee_fndecl (exp);
5454 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5456 static bool warned_f_a_n, warned_n_a_f;
5458 switch (fcode)
5460 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5461 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5462 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5463 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5464 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5465 if (warned_f_a_n)
5466 break;
5468 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5469 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5470 warned_f_a_n = true;
5471 break;
5473 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5474 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5475 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5476 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5477 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5478 if (warned_n_a_f)
5479 break;
5481 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5482 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5483 warned_n_a_f = true;
5484 break;
5486 default:
5487 gcc_unreachable ();
5491 /* Expand the operands. */
5492 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5493 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5495 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5496 after);
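/* Editorial illustration: the GCC 4.4 semantic change warned about above
   concerns the value stored by the NAND forms:

     GCC <= 4.3:  *ptr = ~*ptr & val;
     GCC >= 4.4:  *ptr = ~(*ptr & val);    /* current semantics */

   so __sync_fetch_and_nand (&x, m) now atomically performs
   { tmp = x; x = ~(tmp & m); return tmp; }.  */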
5499 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5500 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5501 true if this is the boolean form. TARGET is a place for us to store the
5502 results; this is NOT optional if IS_BOOL is true. */
5504 static rtx
5505 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5506 bool is_bool, rtx target)
5508 rtx old_val, new_val, mem;
5509 rtx *pbool, *poval;
5511 /* Expand the operands. */
5512 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5513 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5514 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5516 pbool = poval = NULL;
5517 if (target != const0_rtx)
5519 if (is_bool)
5520 pbool = &target;
5521 else
5522 poval = &target;
5524 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5525 false, MEMMODEL_SYNC_SEQ_CST,
5526 MEMMODEL_SYNC_SEQ_CST))
5527 return NULL_RTX;
5529 return target;
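/* Editorial illustration: the two forms dispatched above behave as if the
   following ran atomically:

     bool __sync_bool_compare_and_swap (T *p, T oldv, T newv)
       { if (*p != oldv) return false; *p = newv; return true; }

     T __sync_val_compare_and_swap (T *p, T oldv, T newv)
       { T cur = *p; if (cur == oldv) *p = newv; return cur; }

   IS_BOOL picks which result (PBOOL vs. POVAL) is requested from
   expand_atomic_compare_and_swap.  */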
5532 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5533 general form is actually an atomic exchange, and some targets only
5534 support a reduced form with the second argument being a constant 1.
5535 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5536 the results. */
5538 static rtx
5539 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5540 rtx target)
5542 rtx val, mem;
5544 /* Expand the operands. */
5545 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5546 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5548 return expand_sync_lock_test_and_set (target, mem, val);
5551 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5553 static void
5554 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5556 rtx mem;
5558 /* Expand the operands. */
5559 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5561 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5564 /* Given an integer representing an ``enum memmodel'', verify its
5565 correctness and return the memory model enum. */
5567 static enum memmodel
5568 get_memmodel (tree exp)
5570 rtx op;
5571 unsigned HOST_WIDE_INT val;
5572 source_location loc
5573 = expansion_point_location_if_in_system_header (input_location);
5575 /* If the parameter is not a constant, it's a run time value so we'll just
5576 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5577 if (TREE_CODE (exp) != INTEGER_CST)
5578 return MEMMODEL_SEQ_CST;
5580 op = expand_normal (exp);
5582 val = INTVAL (op);
5583 if (targetm.memmodel_check)
5584 val = targetm.memmodel_check (val);
5585 else if (val & ~MEMMODEL_MASK)
5587 warning_at (loc, OPT_Winvalid_memory_model,
5588 "unknown architecture specifier in memory model to builtin");
5589 return MEMMODEL_SEQ_CST;
5592 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5593 if (memmodel_base (val) >= MEMMODEL_LAST)
5595 warning_at (loc, OPT_Winvalid_memory_model,
5596 "invalid memory model argument to builtin");
5597 return MEMMODEL_SEQ_CST;
5600 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5601 be conservative and promote consume to acquire. */
5602 if (val == MEMMODEL_CONSUME)
5603 val = MEMMODEL_ACQUIRE;
5605 return (enum memmodel) val;
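/* Editorial illustration: the checks above accept the standard __ATOMIC_*
   constants and sanitize everything else, e.g.

     __atomic_load_n (&x, __ATOMIC_CONSUME);  /* promoted to ACQUIRE (PR 59448) */

     int m = runtime_model ();     /* hypothetical non-constant model */
     __atomic_load_n (&x, m);      /* treated as SEQ_CST here */

   Out-of-range constants draw -Winvalid-memory-model and also fall back to
   MEMMODEL_SEQ_CST.  */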
5608 /* Expand the __atomic_exchange intrinsic:
5609 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5610 EXP is the CALL_EXPR.
5611 TARGET is an optional place for us to store the results. */
5613 static rtx
5614 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5616 rtx val, mem;
5617 enum memmodel model;
5619 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5621 if (!flag_inline_atomics)
5622 return NULL_RTX;
5624 /* Expand the operands. */
5625 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5626 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5628 return expand_atomic_exchange (target, mem, val, model);
5631 /* Expand the __atomic_compare_exchange intrinsic:
5632 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5633 TYPE desired, BOOL weak,
5634 enum memmodel success,
5635 enum memmodel failure)
5636 EXP is the CALL_EXPR.
5637 TARGET is an optional place for us to store the results. */
5639 static rtx
5640 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5641 rtx target)
5643 rtx expect, desired, mem, oldval;
5644 rtx_code_label *label;
5645 enum memmodel success, failure;
5646 tree weak;
5647 bool is_weak;
5648 source_location loc
5649 = expansion_point_location_if_in_system_header (input_location);
5651 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5652 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5654 if (failure > success)
5656 warning_at (loc, OPT_Winvalid_memory_model,
5657 "failure memory model cannot be stronger than success "
5658 "memory model for %<__atomic_compare_exchange%>");
5659 success = MEMMODEL_SEQ_CST;
5662 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5664 warning_at (loc, OPT_Winvalid_memory_model,
5665 "invalid failure memory model for "
5666 "%<__atomic_compare_exchange%>");
5667 failure = MEMMODEL_SEQ_CST;
5668 success = MEMMODEL_SEQ_CST;
5672 if (!flag_inline_atomics)
5673 return NULL_RTX;
5675 /* Expand the operands. */
5676 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5678 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5679 expect = convert_memory_address (Pmode, expect);
5680 expect = gen_rtx_MEM (mode, expect);
5681 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5683 weak = CALL_EXPR_ARG (exp, 3);
5684 is_weak = false;
5685 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5686 is_weak = true;
5688 if (target == const0_rtx)
5689 target = NULL;
5691 /* Lest the rtl backend create a race condition with an improper store
5692 to memory, always create a new pseudo for OLDVAL. */
5693 oldval = NULL;
5695 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5696 is_weak, success, failure))
5697 return NULL_RTX;
5699 /* Conditionally store back to EXPECT, lest we create a race condition
5700 with an improper store to memory. */
5701 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5702 the normal case where EXPECT is totally private, i.e. a register. At
5703 which point the store can be unconditional. */
5704 label = gen_label_rtx ();
5705 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5706 GET_MODE (target), 1, label);
5707 emit_move_insn (expect, oldval);
5708 emit_label (label);
5710 return target;
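/* Editorial illustration: the emitted sequence above matches the documented
   builtin behaviour, roughly:

     bool ok = atomic_CAS (mem, old_at_mem, desired);  /* pseudo-op */
     if (!ok)
       *expect = old_at_mem;   /* store back only on failure */
     return ok;

   The store-back must be conditional because EXPECT may point at memory
   visible to other threads; an unconditional store could introduce a race
   the source program never had.  */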
5713 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5714 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5715 call. The weak parameter must be dropped to match the expected parameter
5716 list and the expected argument changed from value to pointer to memory
5717 slot. */
5719 static void
5720 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5722 unsigned int z;
5723 vec<tree, va_gc> *vec;
5725 vec_alloc (vec, 5);
5726 vec->quick_push (gimple_call_arg (call, 0));
5727 tree expected = gimple_call_arg (call, 1);
5728 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5729 TREE_TYPE (expected));
5730 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5731 if (expd != x)
5732 emit_move_insn (x, expd);
5733 tree v = make_tree (TREE_TYPE (expected), x);
5734 vec->quick_push (build1 (ADDR_EXPR,
5735 build_pointer_type (TREE_TYPE (expected)), v));
5736 vec->quick_push (gimple_call_arg (call, 2));
5737 /* Skip the boolean weak parameter. */
5738 for (z = 4; z < 6; z++)
5739 vec->quick_push (gimple_call_arg (call, z));
5740 built_in_function fncode
5741 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5742 + exact_log2 (GET_MODE_SIZE (mode)));
5743 tree fndecl = builtin_decl_explicit (fncode);
5744 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5745 fndecl);
5746 tree exp = build_call_vec (boolean_type_node, fn, vec);
5747 tree lhs = gimple_call_lhs (call);
5748 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5749 if (lhs)
5751 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5752 if (GET_MODE (boolret) != mode)
5753 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5754 x = force_reg (mode, x);
5755 write_complex_part (target, boolret, true);
5756 write_complex_part (target, x, false);
5760 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5762 void
5763 expand_ifn_atomic_compare_exchange (gcall *call)
5765 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5766 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5767 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5768 rtx expect, desired, mem, oldval, boolret;
5769 enum memmodel success, failure;
5770 tree lhs;
5771 bool is_weak;
5772 source_location loc
5773 = expansion_point_location_if_in_system_header (gimple_location (call));
5775 success = get_memmodel (gimple_call_arg (call, 4));
5776 failure = get_memmodel (gimple_call_arg (call, 5));
5778 if (failure > success)
5780 warning_at (loc, OPT_Winvalid_memory_model,
5781 "failure memory model cannot be stronger than success "
5782 "memory model for %<__atomic_compare_exchange%>");
5783 success = MEMMODEL_SEQ_CST;
5786 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5788 warning_at (loc, OPT_Winvalid_memory_model,
5789 "invalid failure memory model for "
5790 "%<__atomic_compare_exchange%>");
5791 failure = MEMMODEL_SEQ_CST;
5792 success = MEMMODEL_SEQ_CST;
5795 if (!flag_inline_atomics)
5797 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5798 return;
5801 /* Expand the operands. */
5802 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5804 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5805 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5807 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5809 boolret = NULL;
5810 oldval = NULL;
5812 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5813 is_weak, success, failure))
5815 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5816 return;
5819 lhs = gimple_call_lhs (call);
5820 if (lhs)
5822 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5823 if (GET_MODE (boolret) != mode)
5824 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5825 write_complex_part (target, boolret, true);
5826 write_complex_part (target, oldval, false);
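/* Editorial illustration: IFN_ATOMIC_COMPARE_EXCHANGE yields a complex
   pair; the two write_complex_part calls above amount to

     lhs = COMPLEX_EXPR <oldval, boolret>;

   i.e. the old memory value in the real part and the success flag in the
   imaginary part, for the caller to split apart.  */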
5830 /* Expand the __atomic_load intrinsic:
5831 TYPE __atomic_load (TYPE *object, enum memmodel)
5832 EXP is the CALL_EXPR.
5833 TARGET is an optional place for us to store the results. */
5835 static rtx
5836 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5838 rtx mem;
5839 enum memmodel model;
5841 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5842 if (is_mm_release (model) || is_mm_acq_rel (model))
5844 source_location loc
5845 = expansion_point_location_if_in_system_header (input_location);
5846 warning_at (loc, OPT_Winvalid_memory_model,
5847 "invalid memory model for %<__atomic_load%>");
5848 model = MEMMODEL_SEQ_CST;
5851 if (!flag_inline_atomics)
5852 return NULL_RTX;
5854 /* Expand the operand. */
5855 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5857 return expand_atomic_load (target, mem, model);
5861 /* Expand the __atomic_store intrinsic:
5862 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5863 EXP is the CALL_EXPR.
5864 TARGET is an optional place for us to store the results. */
5866 static rtx
5867 expand_builtin_atomic_store (machine_mode mode, tree exp)
5869 rtx mem, val;
5870 enum memmodel model;
5872 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5873 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5874 || is_mm_release (model)))
5876 source_location loc
5877 = expansion_point_location_if_in_system_header (input_location);
5878 warning_at (loc, OPT_Winvalid_memory_model,
5879 "invalid memory model for %<__atomic_store%>");
5880 model = MEMMODEL_SEQ_CST;
5883 if (!flag_inline_atomics)
5884 return NULL_RTX;
5886 /* Expand the operands. */
5887 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5888 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5890 return expand_atomic_store (mem, val, model, false);
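/* Editorial illustration: per the check above only RELAXED, RELEASE and
   SEQ_CST are valid for stores, so

     __atomic_store_n (&x, 1, __ATOMIC_ACQUIRE);

   draws -Winvalid-memory-model and is expanded as if SEQ_CST had been
   given.  */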
5893 /* Expand the __atomic_fetch_XXX intrinsic:
5894 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5895 EXP is the CALL_EXPR.
5896 TARGET is an optional place for us to store the results.
5897 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; as elsewhere, NOT means NAND.
5898 FETCH_AFTER is true if returning the result of the operation.
5899 FETCH_AFTER is false if returning the value before the operation.
5900 IGNORE is true if the result is not used.
5901 EXT_CALL is the correct builtin for an external call if this cannot be
5902 resolved to an instruction sequence. */
5904 static rtx
5905 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5906 enum rtx_code code, bool fetch_after,
5907 bool ignore, enum built_in_function ext_call)
5909 rtx val, mem, ret;
5910 enum memmodel model;
5911 tree fndecl;
5912 tree addr;
5914 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5916 /* Expand the operands. */
5917 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5918 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5920 /* Only try generating instructions if inlining is turned on. */
5921 if (flag_inline_atomics)
5923 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5924 if (ret)
5925 return ret;
5928 /* Return if a different routine isn't needed for the library call. */
5929 if (ext_call == BUILT_IN_NONE)
5930 return NULL_RTX;
5932 /* Change the call to the specified function. */
5933 fndecl = get_callee_fndecl (exp);
5934 addr = CALL_EXPR_FN (exp);
5935 STRIP_NOPS (addr);
5937 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5938 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5940 /* Expand the call here so we can emit trailing code. */
5941 ret = expand_call (exp, target, ignore);
5943 /* Replace the original function just in case it matters. */
5944 TREE_OPERAND (addr, 0) = fndecl;
5946 /* Then issue the arithmetic correction to return the right result. */
5947 if (!ignore)
5949 if (code == NOT)
5951 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5952 OPTAB_LIB_WIDEN);
5953 ret = expand_simple_unop (mode, NOT, ret, target, true);
5955 else
5956 ret = expand_simple_binop (mode, code, ret, val, target, true,
5957 OPTAB_LIB_WIDEN);
5959 return ret;
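/* Editorial illustration: when the fallback library call returns the
   pre-operation value but the user asked for the OP_and_fetch result, the
   correction above recomputes the new value from the old one:

     new = old OP val;       /* PLUS, MINUS, AND, IOR, XOR */
     new = ~(old & val);     /* code == NOT, i.e. NAND */

   which is why the NOT case needs an AND followed by a NOT.  */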
5962 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5964 void
5965 expand_ifn_atomic_bit_test_and (gcall *call)
5967 tree ptr = gimple_call_arg (call, 0);
5968 tree bit = gimple_call_arg (call, 1);
5969 tree flag = gimple_call_arg (call, 2);
5970 tree lhs = gimple_call_lhs (call);
5971 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5972 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5973 enum rtx_code code;
5974 optab optab;
5975 struct expand_operand ops[5];
5977 gcc_assert (flag_inline_atomics);
5979 if (gimple_call_num_args (call) == 4)
5980 model = get_memmodel (gimple_call_arg (call, 3));
5982 rtx mem = get_builtin_sync_mem (ptr, mode);
5983 rtx val = expand_expr_force_mode (bit, mode);
5985 switch (gimple_call_internal_fn (call))
5987 case IFN_ATOMIC_BIT_TEST_AND_SET:
5988 code = IOR;
5989 optab = atomic_bit_test_and_set_optab;
5990 break;
5991 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5992 code = XOR;
5993 optab = atomic_bit_test_and_complement_optab;
5994 break;
5995 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5996 code = AND;
5997 optab = atomic_bit_test_and_reset_optab;
5998 break;
5999 default:
6000 gcc_unreachable ();
6003 if (lhs == NULL_TREE)
6005 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6006 val, NULL_RTX, true, OPTAB_DIRECT);
6007 if (code == AND)
6008 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6009 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6010 return;
6013 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6014 enum insn_code icode = direct_optab_handler (optab, mode);
6015 gcc_assert (icode != CODE_FOR_nothing);
6016 create_output_operand (&ops[0], target, mode);
6017 create_fixed_operand (&ops[1], mem);
6018 create_convert_operand_to (&ops[2], val, mode, true);
6019 create_integer_operand (&ops[3], model);
6020 create_integer_operand (&ops[4], integer_onep (flag));
6021 if (maybe_expand_insn (icode, 5, ops))
6022 return;
6024 rtx bitval = val;
6025 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6026 val, NULL_RTX, true, OPTAB_DIRECT);
6027 rtx maskval = val;
6028 if (code == AND)
6029 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6030 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6031 code, model, false);
6032 if (integer_onep (flag))
6034 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6035 NULL_RTX, true, OPTAB_DIRECT);
6036 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6037 true, OPTAB_DIRECT);
6039 else
6040 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6041 OPTAB_DIRECT);
6042 if (result != target)
6043 emit_move_insn (target, result);
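/* Editorial illustration: when no atomic_bit_test_and_* pattern exists,
   the fallback above reduces e.g. IFN_ATOMIC_BIT_TEST_AND_SET with a used
   LHS and FLAG set to:

     mask = 1 << bit;
     old  = __atomic_fetch_or (ptr, mask, model);
     lhs  = (old >> bit) & 1;     /* tested bit, normalized to 0/1 */

   With FLAG clear the last line is lhs = old & mask instead.  */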
6046 /* Expand an atomic clear operation.
6047 void __atomic_clear (BOOL *obj, enum memmodel)
6048 EXP is the call expression. */
6050 static rtx
6051 expand_builtin_atomic_clear (tree exp)
6053 machine_mode mode;
6054 rtx mem, ret;
6055 enum memmodel model;
6057 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6058 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6059 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6061 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6063 source_location loc
6064 = expansion_point_location_if_in_system_header (input_location);
6065 warning_at (loc, OPT_Winvalid_memory_model,
6066 "invalid memory model for %<__atomic_store%>");
6067 model = MEMMODEL_SEQ_CST;
6070 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
6071 The only way this can fail is if the bool type is larger than a word size:
6072 unlikely, but handled anyway for completeness by emitting a plain store.
6073 Assume a single-threaded model in that case, since there is no atomic
6074 support and no barriers are required. */
6075 ret = expand_atomic_store (mem, const0_rtx, model, true);
6076 if (!ret)
6077 emit_move_insn (mem, const0_rtx);
6078 return const0_rtx;
6081 /* Expand an atomic test_and_set operation.
6082 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6083 EXP is the call expression. */
6085 static rtx
6086 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6088 rtx mem;
6089 enum memmodel model;
6090 machine_mode mode;
6092 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6093 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6094 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6096 return expand_atomic_test_and_set (target, mem, model);
6100 /* Return true if an object of size ARG0 with the alignment given by optional
6101 ARG1 is always lock free on this architecture. NULL ARG1: typical alignment. */
6103 static tree
6104 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6106 int size;
6107 machine_mode mode;
6108 unsigned int mode_align, type_align;
6110 if (TREE_CODE (arg0) != INTEGER_CST)
6111 return NULL_TREE;
6113 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6114 mode = mode_for_size (size, MODE_INT, 0);
6115 mode_align = GET_MODE_ALIGNMENT (mode);
6117 if (TREE_CODE (arg1) == INTEGER_CST)
6119 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6121 /* Either this argument is null, or it's a fake pointer encoding
6122 the alignment of the object. */
6123 val = least_bit_hwi (val);
6124 val *= BITS_PER_UNIT;
6126 if (val == 0 || mode_align < val)
6127 type_align = mode_align;
6128 else
6129 type_align = val;
6131 else
6133 tree ttype = TREE_TYPE (arg1);
6135 /* This function is usually invoked and folded immediately by the front
6136 end before anything else has a chance to look at it. The pointer
6137 parameter at this point is usually cast to a void *, so check for that
6138 and look past the cast. */
6139 if (CONVERT_EXPR_P (arg1)
6140 && POINTER_TYPE_P (ttype)
6141 && VOID_TYPE_P (TREE_TYPE (ttype))
6142 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6143 arg1 = TREE_OPERAND (arg1, 0);
6145 ttype = TREE_TYPE (arg1);
6146 gcc_assert (POINTER_TYPE_P (ttype));
6148 /* Get the underlying type of the object. */
6149 ttype = TREE_TYPE (ttype);
6150 type_align = TYPE_ALIGN (ttype);
6153 /* If the object has smaller alignment, the lock free routines cannot
6154 be used. */
6155 if (type_align < mode_align)
6156 return boolean_false_node;
6158 /* Check if a compare_and_swap pattern exists for the mode which represents
6159 the required size. The pattern is not allowed to fail, so the existence
6160 of the pattern indicates support is present. */
6161 if (can_compare_and_swap_p (mode, true))
6162 return boolean_true_node;
6163 else
6164 return boolean_false_node;
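/* Editorial illustration: this folding lets the front end resolve calls
   such as

     __atomic_always_lock_free (sizeof (int), 0);   /* typical alignment */
     __atomic_always_lock_free (4, (void *) 2);     /* fake 2-byte-aligned ptr */

   at compile time: the first uses the mode's natural alignment, while the
   second fails on targets whose 4-byte compare-and-swap requires 4-byte
   alignment.  */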
6167 /* Return true if the parameters to call EXP represent an object which will
6168 always generate lock free instructions. The first argument represents the
6169 size of the object, and the second parameter is a pointer to the object
6170 itself. If NULL is passed for the object, then the result is based on
6171 typical alignment for an object of the specified size. Otherwise return
6172 false. */
6174 static rtx
6175 expand_builtin_atomic_always_lock_free (tree exp)
6177 tree size;
6178 tree arg0 = CALL_EXPR_ARG (exp, 0);
6179 tree arg1 = CALL_EXPR_ARG (exp, 1);
6181 if (TREE_CODE (arg0) != INTEGER_CST)
6183 error ("non-constant argument 1 to __atomic_always_lock_free");
6184 return const0_rtx;
6187 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6188 if (size == boolean_true_node)
6189 return const1_rtx;
6190 return const0_rtx;
6193 /* Return boolean_true_node if it can be determined that the object pointed
6194 to by ARG1, of size ARG0, is lock free on this architecture; else NULL_TREE. */
6196 static tree
6197 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6199 if (!flag_inline_atomics)
6200 return NULL_TREE;
6202 /* If it isn't always lock free, don't generate a result. */
6203 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6204 return boolean_true_node;
6206 return NULL_TREE;
6209 /* Return true if the parameters to call EXP represent an object which will
6210 always generate lock free instructions. The first argument represents the
6211 size of the object, and the second parameter is a pointer to the object
6212 itself. If NULL is passed for the object, then the result is based on
6213 typical alignment for an object of the specified size. Otherwise return
6214 NULL. */
6216 static rtx
6217 expand_builtin_atomic_is_lock_free (tree exp)
6219 tree size;
6220 tree arg0 = CALL_EXPR_ARG (exp, 0);
6221 tree arg1 = CALL_EXPR_ARG (exp, 1);
6223 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6225 error ("non-integer argument 1 to __atomic_is_lock_free");
6226 return NULL_RTX;
6229 if (!flag_inline_atomics)
6230 return NULL_RTX;
6232 /* If the value is known at compile time, return the RTX for it. */
6233 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6234 if (size == boolean_true_node)
6235 return const1_rtx;
6237 return NULL_RTX;
6240 /* Expand the __atomic_thread_fence intrinsic:
6241 void __atomic_thread_fence (enum memmodel)
6242 EXP is the CALL_EXPR. */
6244 static void
6245 expand_builtin_atomic_thread_fence (tree exp)
6247 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6248 expand_mem_thread_fence (model);
6251 /* Expand the __atomic_signal_fence intrinsic:
6252 void __atomic_signal_fence (enum memmodel)
6253 EXP is the CALL_EXPR. */
6255 static void
6256 expand_builtin_atomic_signal_fence (tree exp)
6258 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6259 expand_mem_signal_fence (model);
6262 /* Expand the __sync_synchronize intrinsic. */
6264 static void
6265 expand_builtin_sync_synchronize (void)
6267 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6270 static rtx
6271 expand_builtin_thread_pointer (tree exp, rtx target)
6273 enum insn_code icode;
6274 if (!validate_arglist (exp, VOID_TYPE))
6275 return const0_rtx;
6276 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6277 if (icode != CODE_FOR_nothing)
6279 struct expand_operand op;
6280 /* If the target is not suitable then create a new target. */
6281 if (target == NULL_RTX
6282 || !REG_P (target)
6283 || GET_MODE (target) != Pmode)
6284 target = gen_reg_rtx (Pmode);
6285 create_output_operand (&op, target, Pmode);
6286 expand_insn (icode, 1, &op);
6287 return target;
6289 error ("__builtin_thread_pointer is not supported on this target");
6290 return const0_rtx;
6293 static void
6294 expand_builtin_set_thread_pointer (tree exp)
6296 enum insn_code icode;
6297 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6298 return;
6299 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6300 if (icode != CODE_FOR_nothing)
6302 struct expand_operand op;
6303 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6304 Pmode, EXPAND_NORMAL);
6305 create_input_operand (&op, val, Pmode);
6306 expand_insn (icode, 1, &op);
6307 return;
6309 error ("__builtin_set_thread_pointer is not supported on this target");
6313 /* Emit code to restore the stack pointer from the value saved in VAR. */
6315 static void
6316 expand_stack_restore (tree var)
6318 rtx_insn *prev;
6319 rtx sa = expand_normal (var);
6321 sa = convert_memory_address (Pmode, sa);
6323 prev = get_last_insn ();
6324 emit_stack_restore (SAVE_BLOCK, sa);
6326 record_new_stack_level ();
6328 fixup_args_size_notes (prev, get_last_insn (), 0);
6331 /* Emit code to save the current value of the stack pointer. */
6333 static rtx
6334 expand_stack_save (void)
6336 rtx ret = NULL_RTX;
6338 emit_stack_save (SAVE_BLOCK, &ret);
6339 return ret;
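/* Editorial illustration: these helpers back __builtin_stack_save and
   __builtin_stack_restore, which the gimplifier pairs around blocks
   containing variable-length arrays, e.g.

     { char buf[n]; use (buf); }   /* save on entry, restore on exit */

   so the VLA's stack space is reclaimed when the block is left.  */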
6343 /* Expand an expression EXP that calls a built-in function,
6344 with result going to TARGET if that's convenient
6345 (and in mode MODE if that's convenient).
6346 SUBTARGET may be used as the target for computing one of EXP's operands.
6347 IGNORE is nonzero if the value is to be ignored. */
6350 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6351 int ignore)
6353 tree fndecl = get_callee_fndecl (exp);
6354 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6355 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6356 int flags;
6358 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6359 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6361 /* When ASan is enabled, we don't want to expand some memory/string
6362 builtins and rely on libsanitizer's hooks. This allows us to avoid
6363 redundant checks and be sure, that possible overflow will be detected
6364 by ASan. */
6366 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6367 return expand_call (exp, target, ignore);
6369 /* When not optimizing, generate calls to library functions for a certain
6370 set of builtins. */
6371 if (!optimize
6372 && !called_as_built_in (fndecl)
6373 && fcode != BUILT_IN_FORK
6374 && fcode != BUILT_IN_EXECL
6375 && fcode != BUILT_IN_EXECV
6376 && fcode != BUILT_IN_EXECLP
6377 && fcode != BUILT_IN_EXECLE
6378 && fcode != BUILT_IN_EXECVP
6379 && fcode != BUILT_IN_EXECVE
6380 && fcode != BUILT_IN_ALLOCA
6381 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6382 && fcode != BUILT_IN_FREE
6383 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6384 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6385 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6386 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6387 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6388 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6389 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6390 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6391 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6392 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6393 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6394 && fcode != BUILT_IN_CHKP_BNDRET)
6395 return expand_call (exp, target, ignore);
6397 /* The built-in function expanders test for target == const0_rtx
6398 to determine whether the function's result will be ignored. */
6399 if (ignore)
6400 target = const0_rtx;
6402 /* If the result of a pure or const built-in function is ignored, and
6403 none of its arguments are volatile, we can avoid expanding the
6404 built-in call and just evaluate the arguments for side-effects. */
6405 if (target == const0_rtx
6406 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6407 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6409 bool volatilep = false;
6410 tree arg;
6411 call_expr_arg_iterator iter;
6413 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6414 if (TREE_THIS_VOLATILE (arg))
6416 volatilep = true;
6417 break;
6420 if (! volatilep)
6422 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6423 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6424 return const0_rtx;
6428 /* expand_builtin_with_bounds is supposed to be used for
6429 instrumented builtin calls. */
6430 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6432 switch (fcode)
6434 CASE_FLT_FN (BUILT_IN_FABS):
6435 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6436 case BUILT_IN_FABSD32:
6437 case BUILT_IN_FABSD64:
6438 case BUILT_IN_FABSD128:
6439 target = expand_builtin_fabs (exp, target, subtarget);
6440 if (target)
6441 return target;
6442 break;
6444 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6445 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6446 target = expand_builtin_copysign (exp, target, subtarget);
6447 if (target)
6448 return target;
6449 break;
6451 /* Just do a normal library call if we were unable to fold
6452 the values. */
6453 CASE_FLT_FN (BUILT_IN_CABS):
6454 break;
6456 CASE_FLT_FN (BUILT_IN_FMA):
6457 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6458 if (target)
6459 return target;
6460 break;
6462 CASE_FLT_FN (BUILT_IN_ILOGB):
6463 if (! flag_unsafe_math_optimizations)
6464 break;
6465 gcc_fallthrough ();
6466 CASE_FLT_FN (BUILT_IN_ISINF):
6467 CASE_FLT_FN (BUILT_IN_FINITE):
6468 case BUILT_IN_ISFINITE:
6469 case BUILT_IN_ISNORMAL:
6470 target = expand_builtin_interclass_mathfn (exp, target);
6471 if (target)
6472 return target;
6473 break;
6475 CASE_FLT_FN (BUILT_IN_ICEIL):
6476 CASE_FLT_FN (BUILT_IN_LCEIL):
6477 CASE_FLT_FN (BUILT_IN_LLCEIL):
6478 CASE_FLT_FN (BUILT_IN_LFLOOR):
6479 CASE_FLT_FN (BUILT_IN_IFLOOR):
6480 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6481 target = expand_builtin_int_roundingfn (exp, target);
6482 if (target)
6483 return target;
6484 break;
6486 CASE_FLT_FN (BUILT_IN_IRINT):
6487 CASE_FLT_FN (BUILT_IN_LRINT):
6488 CASE_FLT_FN (BUILT_IN_LLRINT):
6489 CASE_FLT_FN (BUILT_IN_IROUND):
6490 CASE_FLT_FN (BUILT_IN_LROUND):
6491 CASE_FLT_FN (BUILT_IN_LLROUND):
6492 target = expand_builtin_int_roundingfn_2 (exp, target);
6493 if (target)
6494 return target;
6495 break;
6497 CASE_FLT_FN (BUILT_IN_POWI):
6498 target = expand_builtin_powi (exp, target);
6499 if (target)
6500 return target;
6501 break;
6503 CASE_FLT_FN (BUILT_IN_CEXPI):
6504 target = expand_builtin_cexpi (exp, target);
6505 gcc_assert (target);
6506 return target;
6508 CASE_FLT_FN (BUILT_IN_SIN):
6509 CASE_FLT_FN (BUILT_IN_COS):
6510 if (! flag_unsafe_math_optimizations)
6511 break;
6512 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6513 if (target)
6514 return target;
6515 break;
6517 CASE_FLT_FN (BUILT_IN_SINCOS):
6518 if (! flag_unsafe_math_optimizations)
6519 break;
6520 target = expand_builtin_sincos (exp);
6521 if (target)
6522 return target;
6523 break;
6525 case BUILT_IN_APPLY_ARGS:
6526 return expand_builtin_apply_args ();
6528 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6529 FUNCTION with a copy of the parameters described by
6530 ARGUMENTS, and ARGSIZE. It returns a block of memory
6531 allocated on the stack into which is stored all the registers
6532 that might possibly be used for returning the result of a
6533 function. ARGUMENTS is the value returned by
6534 __builtin_apply_args. ARGSIZE is the number of bytes of
6535 arguments that must be copied. ??? How should this value be
6536 computed? We'll also need a safe worst case value for varargs
6537 functions. */
6538 case BUILT_IN_APPLY:
6539 if (!validate_arglist (exp, POINTER_TYPE,
6540 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6541 && !validate_arglist (exp, REFERENCE_TYPE,
6542 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6543 return const0_rtx;
6544 else
6546 rtx ops[3];
6548 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6549 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6550 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6552 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6555 /* __builtin_return (RESULT) causes the function to return the
6556 value described by RESULT. RESULT is address of the block of
6557 memory returned by __builtin_apply. */
6558 case BUILT_IN_RETURN:
6559 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6560 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6561 return const0_rtx;
6563 case BUILT_IN_SAVEREGS:
6564 return expand_builtin_saveregs ();
6566 case BUILT_IN_VA_ARG_PACK:
6567 /* All valid uses of __builtin_va_arg_pack () are removed during
6568 inlining. */
6569 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6570 return const0_rtx;
6572 case BUILT_IN_VA_ARG_PACK_LEN:
6573 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6574 inlining. */
6575 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6576 return const0_rtx;
6578 /* Return the address of the first anonymous stack arg. */
6579 case BUILT_IN_NEXT_ARG:
6580 if (fold_builtin_next_arg (exp, false))
6581 return const0_rtx;
6582 return expand_builtin_next_arg ();
6584 case BUILT_IN_CLEAR_CACHE:
6585 target = expand_builtin___clear_cache (exp);
6586 if (target)
6587 return target;
6588 break;
6590 case BUILT_IN_CLASSIFY_TYPE:
6591 return expand_builtin_classify_type (exp);
6593 case BUILT_IN_CONSTANT_P:
6594 return const0_rtx;
6596 case BUILT_IN_FRAME_ADDRESS:
6597 case BUILT_IN_RETURN_ADDRESS:
6598 return expand_builtin_frame_address (fndecl, exp);
6600 /* Returns the address of the area where the structure is returned.
6601 0 otherwise. */
6602 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6603 if (call_expr_nargs (exp) != 0
6604 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6605 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6606 return const0_rtx;
6607 else
6608 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6610 case BUILT_IN_ALLOCA:
6611 case BUILT_IN_ALLOCA_WITH_ALIGN:
6612 /* If the allocation stems from the declaration of a variable-sized
6613 object, it cannot accumulate. */
6614 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6615 if (target)
6616 return target;
6617 break;
6619 case BUILT_IN_STACK_SAVE:
6620 return expand_stack_save ();
6622 case BUILT_IN_STACK_RESTORE:
6623 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6624 return const0_rtx;
6626 case BUILT_IN_BSWAP16:
6627 case BUILT_IN_BSWAP32:
6628 case BUILT_IN_BSWAP64:
6629 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6630 if (target)
6631 return target;
6632 break;
6634 CASE_INT_FN (BUILT_IN_FFS):
6635 target = expand_builtin_unop (target_mode, exp, target,
6636 subtarget, ffs_optab);
6637 if (target)
6638 return target;
6639 break;
6641 CASE_INT_FN (BUILT_IN_CLZ):
6642 target = expand_builtin_unop (target_mode, exp, target,
6643 subtarget, clz_optab);
6644 if (target)
6645 return target;
6646 break;
6648 CASE_INT_FN (BUILT_IN_CTZ):
6649 target = expand_builtin_unop (target_mode, exp, target,
6650 subtarget, ctz_optab);
6651 if (target)
6652 return target;
6653 break;
6655 CASE_INT_FN (BUILT_IN_CLRSB):
6656 target = expand_builtin_unop (target_mode, exp, target,
6657 subtarget, clrsb_optab);
6658 if (target)
6659 return target;
6660 break;
6662 CASE_INT_FN (BUILT_IN_POPCOUNT):
6663 target = expand_builtin_unop (target_mode, exp, target,
6664 subtarget, popcount_optab);
6665 if (target)
6666 return target;
6667 break;
6669 CASE_INT_FN (BUILT_IN_PARITY):
6670 target = expand_builtin_unop (target_mode, exp, target,
6671 subtarget, parity_optab);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_STRLEN:
6677 target = expand_builtin_strlen (exp, target, target_mode);
6678 if (target)
6679 return target;
6680 break;
6682 case BUILT_IN_STRCAT:
6683 target = expand_builtin_strcat (exp, target);
6684 if (target)
6685 return target;
6686 break;
6688 case BUILT_IN_STRCPY:
6689 target = expand_builtin_strcpy (exp, target);
6690 if (target)
6691 return target;
6692 break;
6694 case BUILT_IN_STRNCAT:
6695 target = expand_builtin_strncat (exp, target);
6696 if (target)
6697 return target;
6698 break;
6700 case BUILT_IN_STRNCPY:
6701 target = expand_builtin_strncpy (exp, target);
6702 if (target)
6703 return target;
6704 break;
6706 case BUILT_IN_STPCPY:
6707 target = expand_builtin_stpcpy (exp, target, mode);
6708 if (target)
6709 return target;
6710 break;
6712 case BUILT_IN_MEMCPY:
6713 target = expand_builtin_memcpy (exp, target);
6714 if (target)
6715 return target;
6716 break;
6718 case BUILT_IN_MEMPCPY:
6719 target = expand_builtin_mempcpy (exp, target, mode);
6720 if (target)
6721 return target;
6722 break;
6724 case BUILT_IN_MEMSET:
6725 target = expand_builtin_memset (exp, target, mode);
6726 if (target)
6727 return target;
6728 break;
6730 case BUILT_IN_BZERO:
6731 target = expand_builtin_bzero (exp);
6732 if (target)
6733 return target;
6734 break;
6736 case BUILT_IN_STRCMP:
6737 target = expand_builtin_strcmp (exp, target);
6738 if (target)
6739 return target;
6740 break;
6742 case BUILT_IN_STRNCMP:
6743 target = expand_builtin_strncmp (exp, target, mode);
6744 if (target)
6745 return target;
6746 break;
6748 case BUILT_IN_BCMP:
6749 case BUILT_IN_MEMCMP:
6750 case BUILT_IN_MEMCMP_EQ:
6751 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6752 if (target)
6753 return target;
6754 if (fcode == BUILT_IN_MEMCMP_EQ)
6756 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6757 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6759 break;
6761 case BUILT_IN_SETJMP:
6762 /* This should have been lowered to the builtins below. */
6763 gcc_unreachable ();
6765 case BUILT_IN_SETJMP_SETUP:
6766 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6767 and the receiver label. */
6768 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6770 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6771 VOIDmode, EXPAND_NORMAL);
6772 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6773 rtx_insn *label_r = label_rtx (label);
6775 /* This is copied from the handling of non-local gotos. */
6776 expand_builtin_setjmp_setup (buf_addr, label_r);
6777 nonlocal_goto_handler_labels
6778 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6779 nonlocal_goto_handler_labels);
6780 /* ??? Do not let expand_label treat us as such since we would
6781 not want to be both on the list of non-local labels and on
6782 the list of forced labels. */
6783 FORCED_LABEL (label) = 0;
6784 return const0_rtx;
6786 break;
6788 case BUILT_IN_SETJMP_RECEIVER:
6789 /* __builtin_setjmp_receiver is passed the receiver label. */
6790 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6792 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6793 rtx_insn *label_r = label_rtx (label);
6795 expand_builtin_setjmp_receiver (label_r);
6796 return const0_rtx;
6798 break;
6800 /* __builtin_longjmp is passed a pointer to an array of five words.
6801 It's similar to the C library longjmp function but works with
6802 __builtin_setjmp above. */
6803 case BUILT_IN_LONGJMP:
6804 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6806 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6807 VOIDmode, EXPAND_NORMAL);
6808 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6810 if (value != const1_rtx)
6812 error ("%<__builtin_longjmp%> second argument must be 1");
6813 return const0_rtx;
6816 expand_builtin_longjmp (buf_addr, value);
6817 return const0_rtx;
6819 break;
6821 case BUILT_IN_NONLOCAL_GOTO:
6822 target = expand_builtin_nonlocal_goto (exp);
6823 if (target)
6824 return target;
6825 break;
6827 /* This updates the setjmp buffer that is its argument with the value
6828 of the current stack pointer. */
6829 case BUILT_IN_UPDATE_SETJMP_BUF:
6830 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6832 rtx buf_addr
6833 = expand_normal (CALL_EXPR_ARG (exp, 0));
6835 expand_builtin_update_setjmp_buf (buf_addr);
6836 return const0_rtx;
6838 break;
6840 case BUILT_IN_TRAP:
6841 expand_builtin_trap ();
6842 return const0_rtx;
6844 case BUILT_IN_UNREACHABLE:
6845 expand_builtin_unreachable ();
6846 return const0_rtx;
6848 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6849 case BUILT_IN_SIGNBITD32:
6850 case BUILT_IN_SIGNBITD64:
6851 case BUILT_IN_SIGNBITD128:
6852 target = expand_builtin_signbit (exp, target);
6853 if (target)
6854 return target;
6855 break;
6857 /* Various hooks for the DWARF 2 __throw routine. */
6858 case BUILT_IN_UNWIND_INIT:
6859 expand_builtin_unwind_init ();
6860 return const0_rtx;
6861 case BUILT_IN_DWARF_CFA:
6862 return virtual_cfa_rtx;
6863 #ifdef DWARF2_UNWIND_INFO
6864 case BUILT_IN_DWARF_SP_COLUMN:
6865 return expand_builtin_dwarf_sp_column ();
6866 case BUILT_IN_INIT_DWARF_REG_SIZES:
6867 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6868 return const0_rtx;
6869 #endif
6870 case BUILT_IN_FROB_RETURN_ADDR:
6871 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6872 case BUILT_IN_EXTRACT_RETURN_ADDR:
6873 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6874 case BUILT_IN_EH_RETURN:
6875 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6876 CALL_EXPR_ARG (exp, 1));
6877 return const0_rtx;
6878 case BUILT_IN_EH_RETURN_DATA_REGNO:
6879 return expand_builtin_eh_return_data_regno (exp);
6880 case BUILT_IN_EXTEND_POINTER:
6881 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6882 case BUILT_IN_EH_POINTER:
6883 return expand_builtin_eh_pointer (exp);
6884 case BUILT_IN_EH_FILTER:
6885 return expand_builtin_eh_filter (exp);
6886 case BUILT_IN_EH_COPY_VALUES:
6887 return expand_builtin_eh_copy_values (exp);
6889 case BUILT_IN_VA_START:
6890 return expand_builtin_va_start (exp);
6891 case BUILT_IN_VA_END:
6892 return expand_builtin_va_end (exp);
6893 case BUILT_IN_VA_COPY:
6894 return expand_builtin_va_copy (exp);
6895 case BUILT_IN_EXPECT:
6896 return expand_builtin_expect (exp, target);
6897 case BUILT_IN_ASSUME_ALIGNED:
6898 return expand_builtin_assume_aligned (exp, target);
6899 case BUILT_IN_PREFETCH:
6900 expand_builtin_prefetch (exp);
6901 return const0_rtx;
6903 case BUILT_IN_INIT_TRAMPOLINE:
6904 return expand_builtin_init_trampoline (exp, true);
6905 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6906 return expand_builtin_init_trampoline (exp, false);
6907 case BUILT_IN_ADJUST_TRAMPOLINE:
6908 return expand_builtin_adjust_trampoline (exp);
6910 case BUILT_IN_INIT_DESCRIPTOR:
6911 return expand_builtin_init_descriptor (exp);
6912 case BUILT_IN_ADJUST_DESCRIPTOR:
6913 return expand_builtin_adjust_descriptor (exp);
6915 case BUILT_IN_FORK:
6916 case BUILT_IN_EXECL:
6917 case BUILT_IN_EXECV:
6918 case BUILT_IN_EXECLP:
6919 case BUILT_IN_EXECLE:
6920 case BUILT_IN_EXECVP:
6921 case BUILT_IN_EXECVE:
6922 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6923 if (target)
6924 return target;
6925 break;
6927 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6928 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6929 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6930 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6931 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6932 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6933 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6934 if (target)
6935 return target;
6936 break;
6938 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6939 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6940 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6941 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6942 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6943 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6944 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6945 if (target)
6946 return target;
6947 break;
6949 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6950 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6951 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6952 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6953 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6954 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6955 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6956 if (target)
6957 return target;
6958 break;
6960 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6961 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6962 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6963 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6964 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6965 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6966 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6967 if (target)
6968 return target;
6969 break;
6971 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6972 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6973 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6974 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6975 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6976 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6977 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6978 if (target)
6979 return target;
6980 break;
6982 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6983 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6984 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6985 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6986 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6987 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6988 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6989 if (target)
6990 return target;
6991 break;
6993 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6994 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6995 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6996 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6997 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6998 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6999 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7000 if (target)
7001 return target;
7002 break;
7004 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7005 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7006 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7007 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7008 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7009 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7010 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7011 if (target)
7012 return target;
7013 break;
7015 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7016 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7017 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7018 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7019 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7020 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7021 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7022 if (target)
7023 return target;
7024 break;
7026 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7027 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7028 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7029 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7030 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7031 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7032 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7033 if (target)
7034 return target;
7035 break;
7037 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7038 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7039 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7040 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7041 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7042 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7043 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7044 if (target)
7045 return target;
7046 break;
7048 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7049 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7050 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7051 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7052 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7053 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7054 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7055 if (target)
7056 return target;
7057 break;
7059 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7060 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7061 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7062 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7063 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7064 if (mode == VOIDmode)
7065 mode = TYPE_MODE (boolean_type_node);
7066 if (!target || !register_operand (target, mode))
7067 target = gen_reg_rtx (mode);
7069 mode = get_builtin_sync_mode
7070 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7071 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7072 if (target)
7073 return target;
7074 break;
7076 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7077 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7078 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7079 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7080 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7081 mode = get_builtin_sync_mode
7082 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7083 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7084 if (target)
7085 return target;
7086 break;
7088 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7089 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7090 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7091 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7092 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7093 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7094 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7095 if (target)
7096 return target;
7097 break;
7099 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7100 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7101 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7102 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7103 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7104 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7105 expand_builtin_sync_lock_release (mode, exp);
7106 return const0_rtx;
7108 case BUILT_IN_SYNC_SYNCHRONIZE:
7109 expand_builtin_sync_synchronize ();
7110 return const0_rtx;
7112 case BUILT_IN_ATOMIC_EXCHANGE_1:
7113 case BUILT_IN_ATOMIC_EXCHANGE_2:
7114 case BUILT_IN_ATOMIC_EXCHANGE_4:
7115 case BUILT_IN_ATOMIC_EXCHANGE_8:
7116 case BUILT_IN_ATOMIC_EXCHANGE_16:
7117 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7118 target = expand_builtin_atomic_exchange (mode, exp, target);
7119 if (target)
7120 return target;
7121 break;
7123 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7124 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7125 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7126 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7127 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7129 unsigned int nargs, z;
7130 vec<tree, va_gc> *vec;
7132 mode =
7133 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7134 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7135 if (target)
7136 return target;
7138 /* If this is turned into an external library call, the weak parameter
7139 must be dropped to match the expected parameter list. */
7140 nargs = call_expr_nargs (exp);
7141 vec_alloc (vec, nargs - 1);
7142 for (z = 0; z < 3; z++)
7143 vec->quick_push (CALL_EXPR_ARG (exp, z));
7144 /* Skip the boolean weak parameter. */
7145 for (z = 4; z < 6; z++)
7146 vec->quick_push (CALL_EXPR_ARG (exp, z));
7147 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7148 break;
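/* For illustration: the builtin form carries a boolean WEAK argument,

     __atomic_compare_exchange_n (ptr, expected, desired,
                                  weak, success_order, failure_order)

   but the external library routine it may fall back to does not,
   taking only (ptr, expected, desired, success_order, failure_order).
   Hence the loop above copies call arguments 0-2 and 4-5 and skips
   index 3, the weak flag.  */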
7151 case BUILT_IN_ATOMIC_LOAD_1:
7152 case BUILT_IN_ATOMIC_LOAD_2:
7153 case BUILT_IN_ATOMIC_LOAD_4:
7154 case BUILT_IN_ATOMIC_LOAD_8:
7155 case BUILT_IN_ATOMIC_LOAD_16:
7156 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7157 target = expand_builtin_atomic_load (mode, exp, target);
7158 if (target)
7159 return target;
7160 break;
7162 case BUILT_IN_ATOMIC_STORE_1:
7163 case BUILT_IN_ATOMIC_STORE_2:
7164 case BUILT_IN_ATOMIC_STORE_4:
7165 case BUILT_IN_ATOMIC_STORE_8:
7166 case BUILT_IN_ATOMIC_STORE_16:
7167 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7168 target = expand_builtin_atomic_store (mode, exp);
7169 if (target)
7170 return const0_rtx;
7171 break;
7173 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7174 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7175 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7176 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7177 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7179 enum built_in_function lib;
7180 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7181 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7182 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7183 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7184 ignore, lib);
7185 if (target)
7186 return target;
7187 break;
7189 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7190 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7191 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7192 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7193 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7195 enum built_in_function lib;
7196 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7197 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7198 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7199 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7200 ignore, lib);
7201 if (target)
7202 return target;
7203 break;
7205 case BUILT_IN_ATOMIC_AND_FETCH_1:
7206 case BUILT_IN_ATOMIC_AND_FETCH_2:
7207 case BUILT_IN_ATOMIC_AND_FETCH_4:
7208 case BUILT_IN_ATOMIC_AND_FETCH_8:
7209 case BUILT_IN_ATOMIC_AND_FETCH_16:
7211 enum built_in_function lib;
7212 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7213 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7214 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7215 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7216 ignore, lib);
7217 if (target)
7218 return target;
7219 break;
7221 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7222 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7223 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7224 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7225 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7227 enum built_in_function lib;
7228 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7229 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7230 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7231 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7232 ignore, lib);
7233 if (target)
7234 return target;
7235 break;
7237 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7238 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7239 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7240 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7241 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7243 enum built_in_function lib;
7244 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7245 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7246 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7247 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7248 ignore, lib);
7249 if (target)
7250 return target;
7251 break;
7253 case BUILT_IN_ATOMIC_OR_FETCH_1:
7254 case BUILT_IN_ATOMIC_OR_FETCH_2:
7255 case BUILT_IN_ATOMIC_OR_FETCH_4:
7256 case BUILT_IN_ATOMIC_OR_FETCH_8:
7257 case BUILT_IN_ATOMIC_OR_FETCH_16:
7259 enum built_in_function lib;
7260 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7261 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7262 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7263 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7264 ignore, lib);
7265 if (target)
7266 return target;
7267 break;
7269 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7270 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7271 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7272 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7273 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7274 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7275 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7276 ignore, BUILT_IN_NONE);
7277 if (target)
7278 return target;
7279 break;
7281 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7282 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7283 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7284 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7285 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7286 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7287 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7288 ignore, BUILT_IN_NONE);
7289 if (target)
7290 return target;
7291 break;
7293 case BUILT_IN_ATOMIC_FETCH_AND_1:
7294 case BUILT_IN_ATOMIC_FETCH_AND_2:
7295 case BUILT_IN_ATOMIC_FETCH_AND_4:
7296 case BUILT_IN_ATOMIC_FETCH_AND_8:
7297 case BUILT_IN_ATOMIC_FETCH_AND_16:
7298 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7299 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7300 ignore, BUILT_IN_NONE);
7301 if (target)
7302 return target;
7303 break;
7305 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7306 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7307 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7308 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7309 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7310 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7311 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7312 ignore, BUILT_IN_NONE);
7313 if (target)
7314 return target;
7315 break;
7317 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7318 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7319 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7320 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7321 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7322 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7323 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7324 ignore, BUILT_IN_NONE);
7325 if (target)
7326 return target;
7327 break;
7329 case BUILT_IN_ATOMIC_FETCH_OR_1:
7330 case BUILT_IN_ATOMIC_FETCH_OR_2:
7331 case BUILT_IN_ATOMIC_FETCH_OR_4:
7332 case BUILT_IN_ATOMIC_FETCH_OR_8:
7333 case BUILT_IN_ATOMIC_FETCH_OR_16:
7334 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7335 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7336 ignore, BUILT_IN_NONE);
7337 if (target)
7338 return target;
7339 break;
7341 case BUILT_IN_ATOMIC_TEST_AND_SET:
7342 return expand_builtin_atomic_test_and_set (exp, target);
7344 case BUILT_IN_ATOMIC_CLEAR:
7345 return expand_builtin_atomic_clear (exp);
7347 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7348 return expand_builtin_atomic_always_lock_free (exp);
7350 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7351 target = expand_builtin_atomic_is_lock_free (exp);
7352 if (target)
7353 return target;
7354 break;
7356 case BUILT_IN_ATOMIC_THREAD_FENCE:
7357 expand_builtin_atomic_thread_fence (exp);
7358 return const0_rtx;
7360 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7361 expand_builtin_atomic_signal_fence (exp);
7362 return const0_rtx;
7364 case BUILT_IN_OBJECT_SIZE:
7365 return expand_builtin_object_size (exp);
7367 case BUILT_IN_MEMCPY_CHK:
7368 case BUILT_IN_MEMPCPY_CHK:
7369 case BUILT_IN_MEMMOVE_CHK:
7370 case BUILT_IN_MEMSET_CHK:
7371 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7372 if (target)
7373 return target;
7374 break;
7376 case BUILT_IN_STRCPY_CHK:
7377 case BUILT_IN_STPCPY_CHK:
7378 case BUILT_IN_STRNCPY_CHK:
7379 case BUILT_IN_STPNCPY_CHK:
7380 case BUILT_IN_STRCAT_CHK:
7381 case BUILT_IN_STRNCAT_CHK:
7382 case BUILT_IN_SNPRINTF_CHK:
7383 case BUILT_IN_VSNPRINTF_CHK:
7384 maybe_emit_chk_warning (exp, fcode);
7385 break;
7387 case BUILT_IN_SPRINTF_CHK:
7388 case BUILT_IN_VSPRINTF_CHK:
7389 maybe_emit_sprintf_chk_warning (exp, fcode);
7390 break;
7392 case BUILT_IN_FREE:
7393 if (warn_free_nonheap_object)
7394 maybe_emit_free_warning (exp);
7395 break;
7397 case BUILT_IN_THREAD_POINTER:
7398 return expand_builtin_thread_pointer (exp, target);
7400 case BUILT_IN_SET_THREAD_POINTER:
7401 expand_builtin_set_thread_pointer (exp);
7402 return const0_rtx;
7404 case BUILT_IN_CILK_DETACH:
7405 expand_builtin_cilk_detach (exp);
7406 return const0_rtx;
7408 case BUILT_IN_CILK_POP_FRAME:
7409 expand_builtin_cilk_pop_frame (exp);
7410 return const0_rtx;
7412 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7413 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7414 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7415 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7416 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7417 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7418 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7419 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7420 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7421 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7422 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7423 /* We allow user CHKP builtins to be called even when the Pointer
7424 Bounds Checker is off. */
7425 if (!chkp_function_instrumented_p (current_function_decl))
7427 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7428 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7429 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7430 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7431 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7432 return expand_normal (CALL_EXPR_ARG (exp, 0));
7433 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7434 return expand_normal (size_zero_node);
7435 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7436 return expand_normal (size_int (-1));
7437 else
7438 return const0_rtx;
7440 /* FALLTHROUGH */
7442 case BUILT_IN_CHKP_BNDMK:
7443 case BUILT_IN_CHKP_BNDSTX:
7444 case BUILT_IN_CHKP_BNDCL:
7445 case BUILT_IN_CHKP_BNDCU:
7446 case BUILT_IN_CHKP_BNDLDX:
7447 case BUILT_IN_CHKP_BNDRET:
7448 case BUILT_IN_CHKP_INTERSECT:
7449 case BUILT_IN_CHKP_NARROW:
7450 case BUILT_IN_CHKP_EXTRACT_LOWER:
7451 case BUILT_IN_CHKP_EXTRACT_UPPER:
7452 /* A software implementation of Pointer Bounds Checker is not yet
7453 implemented; target support is required. */
7454 error ("your target platform does not support -fcheck-pointer-bounds");
7455 break;
7457 case BUILT_IN_ACC_ON_DEVICE:
7458 /* Do a library call if we failed to expand the builtin when
7459 folding. */
7460 break;
7462 default: /* Just do a library call if the builtin is unknown. */
7463 break;
7466 /* The switch statement above can drop through to cause the function
7467 to be called normally. */
7468 return expand_call (exp, target, ignore);
7471 /* Similar to expand_builtin but is used for instrumented calls. */
7473 rtx
7474 expand_builtin_with_bounds (tree exp, rtx target,
7475 rtx subtarget ATTRIBUTE_UNUSED,
7476 machine_mode mode, int ignore)
7478 tree fndecl = get_callee_fndecl (exp);
7479 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7481 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7483 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7484 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7486 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7487 && fcode < END_CHKP_BUILTINS);
7489 switch (fcode)
7491 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7492 target = expand_builtin_memcpy_with_bounds (exp, target);
7493 if (target)
7494 return target;
7495 break;
7497 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7498 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7499 if (target)
7500 return target;
7501 break;
7503 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7504 target = expand_builtin_memset_with_bounds (exp, target, mode);
7505 if (target)
7506 return target;
7507 break;
7509 default:
7510 break;
7513 /* The switch statement above can drop through to cause the function
7514 to be called normally. */
7515 return expand_call (exp, target, ignore);
7518 /* Determine whether a tree node represents a call to a built-in
7519 function. If the tree T is a call to a built-in function with
7520 the right number of arguments of the appropriate types, return
7521 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7522 Otherwise the return value is END_BUILTINS. */
7524 enum built_in_function
7525 builtin_mathfn_code (const_tree t)
7527 const_tree fndecl, arg, parmlist;
7528 const_tree argtype, parmtype;
7529 const_call_expr_arg_iterator iter;
7531 if (TREE_CODE (t) != CALL_EXPR
7532 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7533 return END_BUILTINS;
7535 fndecl = get_callee_fndecl (t);
7536 if (fndecl == NULL_TREE
7537 || TREE_CODE (fndecl) != FUNCTION_DECL
7538 || ! DECL_BUILT_IN (fndecl)
7539 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7540 return END_BUILTINS;
7542 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7543 init_const_call_expr_arg_iterator (t, &iter);
7544 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7546 /* If a function doesn't take a variable number of arguments,
7547 the last element in the list will have type `void'. */
7548 parmtype = TREE_VALUE (parmlist);
7549 if (VOID_TYPE_P (parmtype))
7551 if (more_const_call_expr_args_p (&iter))
7552 return END_BUILTINS;
7553 return DECL_FUNCTION_CODE (fndecl);
7556 if (! more_const_call_expr_args_p (&iter))
7557 return END_BUILTINS;
7559 arg = next_const_call_expr_arg (&iter);
7560 argtype = TREE_TYPE (arg);
7562 if (SCALAR_FLOAT_TYPE_P (parmtype))
7564 if (! SCALAR_FLOAT_TYPE_P (argtype))
7565 return END_BUILTINS;
7567 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7569 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7570 return END_BUILTINS;
7572 else if (POINTER_TYPE_P (parmtype))
7574 if (! POINTER_TYPE_P (argtype))
7575 return END_BUILTINS;
7577 else if (INTEGRAL_TYPE_P (parmtype))
7579 if (! INTEGRAL_TYPE_P (argtype))
7580 return END_BUILTINS;
7582 else
7583 return END_BUILTINS;
7586 /* Variable-length argument list. */
7587 return DECL_FUNCTION_CODE (fndecl);
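/* A hedged usage sketch; CALL is a hypothetical CALL_EXPR:

     if (builtin_mathfn_code (call) == BUILT_IN_SQRT)
       ;  // CALL is sqrt () with one floating-point argument of the
          // matching type class, so sqrt-specific folding is safe.

   Any mismatch in argument count or in argument/parameter type
   classes yields END_BUILTINS instead.  */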
7590 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7591 evaluate to a constant. */
7593 static tree
7594 fold_builtin_constant_p (tree arg)
7596 /* We return 1 for a numeric type that's known to be a constant
7597 value at compile-time or for an aggregate type that's a
7598 literal constant. */
7599 STRIP_NOPS (arg);
7601 /* If we know this is a constant, return the constant 1. */
7602 if (CONSTANT_CLASS_P (arg)
7603 || (TREE_CODE (arg) == CONSTRUCTOR
7604 && TREE_CONSTANT (arg)))
7605 return integer_one_node;
7606 if (TREE_CODE (arg) == ADDR_EXPR)
7608 tree op = TREE_OPERAND (arg, 0);
7609 if (TREE_CODE (op) == STRING_CST
7610 || (TREE_CODE (op) == ARRAY_REF
7611 && integer_zerop (TREE_OPERAND (op, 1))
7612 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7613 return integer_one_node;
7616 /* If this expression has side effects, show we don't know it to be a
7617 constant. Likewise if it's a pointer or aggregate type since in
7618 those cases we only want literals, since those are only optimized
7619 when generating RTL, not later.
7620 And finally, if we are compiling an initializer, not code, we
7621 need to return a definite result now; there's not going to be any
7622 more optimization done. */
7623 if (TREE_SIDE_EFFECTS (arg)
7624 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7625 || POINTER_TYPE_P (TREE_TYPE (arg))
7626 || cfun == 0
7627 || folding_initializer
7628 || force_folding_builtin_constant_p)
7629 return integer_zero_node;
7631 return NULL_TREE;
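/* Worked examples of the cases above (illustrative only):

     __builtin_constant_p (3 + 4)  -> integer_one_node: folds to 1.
     __builtin_constant_p ("abc")  -> integer_one_node: ADDR_EXPR of
                                      a STRING_CST.
     __builtin_constant_p (f ())   -> integer_zero_node: side effects.
     __builtin_constant_p (x)      -> NULL_TREE while optimizing,
                                      i.e. the answer is deferred.  */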
7634 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7635 return it as a truthvalue. */
7637 static tree
7638 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7639 tree predictor)
7641 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7643 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7644 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7645 ret_type = TREE_TYPE (TREE_TYPE (fn));
7646 pred_type = TREE_VALUE (arg_types);
7647 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7649 pred = fold_convert_loc (loc, pred_type, pred);
7650 expected = fold_convert_loc (loc, expected_type, expected);
7651 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7652 predictor);
7654 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7655 build_int_cst (ret_type, 0));
7658 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7659 NULL_TREE if no simplification is possible. */
7661 tree
7662 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7664 tree inner, fndecl, inner_arg0;
7665 enum tree_code code;
7667 /* Distribute the expected value over short-circuiting operators.
7668 See through the cast from truthvalue_type_node to long. */
7669 inner_arg0 = arg0;
7670 while (CONVERT_EXPR_P (inner_arg0)
7671 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7672 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7673 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7675 /* If this is a builtin_expect within a builtin_expect, keep the
7676 inner one. See through a comparison against a constant. It
7677 might have been added to create a truthvalue. */
7678 inner = inner_arg0;
7680 if (COMPARISON_CLASS_P (inner)
7681 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7682 inner = TREE_OPERAND (inner, 0);
7684 if (TREE_CODE (inner) == CALL_EXPR
7685 && (fndecl = get_callee_fndecl (inner))
7686 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7687 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7688 return arg0;
7690 inner = inner_arg0;
7691 code = TREE_CODE (inner);
7692 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7694 tree op0 = TREE_OPERAND (inner, 0);
7695 tree op1 = TREE_OPERAND (inner, 1);
7697 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7698 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7699 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7701 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7704 /* If the argument isn't invariant then there's nothing else we can do. */
7705 if (!TREE_CONSTANT (inner_arg0))
7706 return NULL_TREE;
7708 /* If we expect that a comparison against the argument will fold to
7709 a constant, return the constant. In practice, this means a true
7710 constant or the address of a non-weak symbol. */
7711 inner = inner_arg0;
7712 STRIP_NOPS (inner);
7713 if (TREE_CODE (inner) == ADDR_EXPR)
7714 {
7715 do
7717 inner = TREE_OPERAND (inner, 0);
7719 while (TREE_CODE (inner) == COMPONENT_REF
7720 || TREE_CODE (inner) == ARRAY_REF);
7721 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7722 return NULL_TREE;
7723 }
7725 /* Otherwise, ARG0 already has the proper type for the return value. */
7726 return arg0;
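/* A sketch of the short-circuit distribution above, with A and B
   standing for arbitrary boolean subexpressions:

     __builtin_expect (A && B, 1)

   is rewritten via build_builtin_expect_predicate into

     (__builtin_expect (A, 1) != 0) && (__builtin_expect (B, 1) != 0)

   (converted back to the original type), so each half of the
   short-circuit keeps its own prediction.  */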
7729 /* Fold a call to __builtin_classify_type with argument ARG. */
7731 static tree
7732 fold_builtin_classify_type (tree arg)
7734 if (arg == 0)
7735 return build_int_cst (integer_type_node, no_type_class);
7737 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7740 /* Fold a call to __builtin_strlen with argument ARG. */
7742 static tree
7743 fold_builtin_strlen (location_t loc, tree type, tree arg)
7745 if (!validate_arg (arg, POINTER_TYPE))
7746 return NULL_TREE;
7747 else
7749 tree len = c_strlen (arg, 0);
7751 if (len)
7752 return fold_convert_loc (loc, type, len);
7754 return NULL_TREE;
7758 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7760 static tree
7761 fold_builtin_inf (location_t loc, tree type, int warn)
7763 REAL_VALUE_TYPE real;
7765 /* __builtin_inff is intended to be usable to define INFINITY on all
7766 targets. If an infinity is not available, INFINITY expands "to a
7767 positive constant of type float that overflows at translation
7768 time", footnote "In this case, using INFINITY will violate the
7769 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7770 Thus we pedwarn to ensure this constraint violation is
7771 diagnosed. */
7772 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7773 pedwarn (loc, 0, "target format does not support infinity");
7775 real_inf (&real);
7776 return build_real (type, real);
7779 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7780 NULL_TREE if no simplification can be made. */
7782 static tree
7783 fold_builtin_sincos (location_t loc,
7784 tree arg0, tree arg1, tree arg2)
7786 tree type;
7787 tree fndecl, call = NULL_TREE;
7789 if (!validate_arg (arg0, REAL_TYPE)
7790 || !validate_arg (arg1, POINTER_TYPE)
7791 || !validate_arg (arg2, POINTER_TYPE))
7792 return NULL_TREE;
7794 type = TREE_TYPE (arg0);
7796 /* Look up the cexpi variant for TYPE; it is used both to fold a
7797 constant argument and to canonicalize sincos to cexpi. */
7797 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7798 if (fn == END_BUILTINS)
7799 return NULL_TREE;
7801 /* Calculate the result when the argument is a constant. */
7802 if (TREE_CODE (arg0) == REAL_CST)
7804 tree complex_type = build_complex_type (type);
7805 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7807 if (!call)
7809 if (!targetm.libc_has_function (function_c99_math_complex)
7810 || !builtin_decl_implicit_p (fn))
7811 return NULL_TREE;
7812 fndecl = builtin_decl_explicit (fn);
7813 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7814 call = builtin_save_expr (call);
7817 return build2 (COMPOUND_EXPR, void_type_node,
7818 build2 (MODIFY_EXPR, void_type_node,
7819 build_fold_indirect_ref_loc (loc, arg1),
7820 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7821 build2 (MODIFY_EXPR, void_type_node,
7822 build_fold_indirect_ref_loc (loc, arg2),
7823 fold_build1_loc (loc, REALPART_EXPR, type, call)));
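/* The net effect at the source level, with tmp a hypothetical
   temporary introduced by builtin_save_expr:

     sincos (x, &s, &c);

   becomes

     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   which is valid because cexpi (x) == cos (x) + i * sin (x).  */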
7826 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7827 Return NULL_TREE if no simplification can be made. */
7829 static tree
7830 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7832 if (!validate_arg (arg1, POINTER_TYPE)
7833 || !validate_arg (arg2, POINTER_TYPE)
7834 || !validate_arg (len, INTEGER_TYPE))
7835 return NULL_TREE;
7837 /* If the LEN parameter is zero, return zero. */
7838 if (integer_zerop (len))
7839 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7840 arg1, arg2);
7842 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7843 if (operand_equal_p (arg1, arg2, 0))
7844 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7846 /* If the LEN parameter is one, return an expression corresponding to
7847 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7848 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7850 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7851 tree cst_uchar_ptr_node
7852 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7854 tree ind1
7855 = fold_convert_loc (loc, integer_type_node,
7856 build1 (INDIRECT_REF, cst_uchar_node,
7857 fold_convert_loc (loc,
7858 cst_uchar_ptr_node,
7859 arg1)));
7860 tree ind2
7861 = fold_convert_loc (loc, integer_type_node,
7862 build1 (INDIRECT_REF, cst_uchar_node,
7863 fold_convert_loc (loc,
7864 cst_uchar_ptr_node,
7865 arg2)));
7866 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7869 return NULL_TREE;
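/* For example, with LEN == 1 the fold above rewrites

     memcmp (p, q, 1)

   as the single byte difference

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   which matches memcmp's sign contract for one byte.  */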
7872 /* Fold a call to builtin isascii with argument ARG. */
7874 static tree
7875 fold_builtin_isascii (location_t loc, tree arg)
7877 if (!validate_arg (arg, INTEGER_TYPE))
7878 return NULL_TREE;
7879 else
7881 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7882 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7883 build_int_cst (integer_type_node,
7884 ~ (unsigned HOST_WIDE_INT) 0x7f));
7885 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7886 arg, integer_zero_node);
7890 /* Fold a call to builtin toascii with argument ARG. */
7892 static tree
7893 fold_builtin_toascii (location_t loc, tree arg)
7895 if (!validate_arg (arg, INTEGER_TYPE))
7896 return NULL_TREE;
7898 /* Transform toascii(c) -> (c & 0x7f). */
7899 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7900 build_int_cst (integer_type_node, 0x7f));
7903 /* Fold a call to builtin isdigit with argument ARG. */
7905 static tree
7906 fold_builtin_isdigit (location_t loc, tree arg)
7908 if (!validate_arg (arg, INTEGER_TYPE))
7909 return NULL_TREE;
7910 else
7912 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7913 /* According to the C standard, isdigit is unaffected by locale.
7914 However, it definitely is affected by the target character set. */
7915 unsigned HOST_WIDE_INT target_digit0
7916 = lang_hooks.to_target_charset ('0');
7918 if (target_digit0 == 0)
7919 return NULL_TREE;
7921 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7922 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7923 build_int_cst (unsigned_type_node, target_digit0));
7924 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7925 build_int_cst (unsigned_type_node, 9));
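/* Worked example, assuming an ASCII target where '0' is 48:

     isdigit (c)  ->  (unsigned) c - 48 <= 9

   For c == '7' (55): 55 - 48 == 7 <= 9, so 1.  For c == 'a' (97):
   97 - 48 == 49 > 9, so 0.  Values below '0' wrap around in the
   unsigned subtraction and are rejected by the same comparison.  */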
7929 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7931 static tree
7932 fold_builtin_fabs (location_t loc, tree arg, tree type)
7934 if (!validate_arg (arg, REAL_TYPE))
7935 return NULL_TREE;
7937 arg = fold_convert_loc (loc, type, arg);
7938 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7941 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7943 static tree
7944 fold_builtin_abs (location_t loc, tree arg, tree type)
7946 if (!validate_arg (arg, INTEGER_TYPE))
7947 return NULL_TREE;
7949 arg = fold_convert_loc (loc, type, arg);
7950 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7953 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7955 static tree
7956 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7958 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7959 if (validate_arg (arg0, REAL_TYPE)
7960 && validate_arg (arg1, REAL_TYPE)
7961 && validate_arg (arg2, REAL_TYPE)
7962 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7963 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7965 return NULL_TREE;
7968 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7970 static tree
7971 fold_builtin_carg (location_t loc, tree arg, tree type)
7973 if (validate_arg (arg, COMPLEX_TYPE)
7974 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7976 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7978 if (atan2_fn)
7980 tree new_arg = builtin_save_expr (arg);
7981 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7982 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7983 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7987 return NULL_TREE;
7990 /* Fold a call to builtin frexp; we can assume the base is 2. */
7992 static tree
7993 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7995 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7996 return NULL_TREE;
7998 STRIP_NOPS (arg0);
8000 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8001 return NULL_TREE;
8003 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8005 /* Proceed if a valid pointer type was passed in. */
8006 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8008 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8009 tree frac, exp;
8011 switch (value->cl)
8013 case rvc_zero:
8014 /* For +-0, return (*exp = 0, +-0). */
8015 exp = integer_zero_node;
8016 frac = arg0;
8017 break;
8018 case rvc_nan:
8019 case rvc_inf:
8020 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8021 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8022 case rvc_normal:
8024 /* Since the frexp function always expects base 2, and in
8025 GCC normalized significands are already in the range
8026 [0.5, 1.0), we have exactly what frexp wants. */
8027 REAL_VALUE_TYPE frac_rvt = *value;
8028 SET_REAL_EXP (&frac_rvt, 0);
8029 frac = build_real (rettype, frac_rvt);
8030 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8032 break;
8033 default:
8034 gcc_unreachable ();
8037 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8038 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8039 TREE_SIDE_EFFECTS (arg1) = 1;
8040 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8043 return NULL_TREE;
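/* Worked example of the rvc_normal case: for arg0 == 8.0 the
   normalized significand is 0.5 and REAL_EXP is 4, since
   8.0 == 0.5 * 2**4, so the call folds to (*arg1 = 4, 0.5).  */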
8046 /* Fold a call to builtin modf. */
8048 static tree
8049 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8051 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8052 return NULL_TREE;
8054 STRIP_NOPS (arg0);
8056 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8057 return NULL_TREE;
8059 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8061 /* Proceed if a valid pointer type was passed in. */
8062 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8064 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8065 REAL_VALUE_TYPE trunc, frac;
8067 switch (value->cl)
8069 case rvc_nan:
8070 case rvc_zero:
8071 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8072 trunc = frac = *value;
8073 break;
8074 case rvc_inf:
8075 /* For +-Inf, return (*arg1 = arg0, +-0). */
8076 frac = dconst0;
8077 frac.sign = value->sign;
8078 trunc = *value;
8079 break;
8080 case rvc_normal:
8081 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8082 real_trunc (&trunc, VOIDmode, value);
8083 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8084 /* If the original number was negative and already
8085 integral, then the fractional part is -0.0. */
8086 if (value->sign && frac.cl == rvc_zero)
8087 frac.sign = value->sign;
8088 break;
8091 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8092 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8093 build_real (rettype, trunc));
8094 TREE_SIDE_EFFECTS (arg1) = 1;
8095 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8096 build_real (rettype, frac));
8099 return NULL_TREE;
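/* Worked examples: modf (-3.5, &iptr) folds to (*iptr = -3.0, -0.5),
   and modf (-2.0, &iptr) to (*iptr = -2.0, -0.0), the sign of the
   zero fractional part being forced negative by the check above.  */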
8102 /* Given a location LOC, an interclass builtin function decl FNDECL
8103 and its single argument ARG, return a folded expression computing
8104 the same, or NULL_TREE if we either couldn't or didn't want to fold
8105 (the latter happens if there's an RTL instruction available). */
8107 static tree
8108 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8110 machine_mode mode;
8112 if (!validate_arg (arg, REAL_TYPE))
8113 return NULL_TREE;
8115 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8116 return NULL_TREE;
8118 mode = TYPE_MODE (TREE_TYPE (arg));
8120 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8122 /* If there is no optab, try generic code. */
8123 switch (DECL_FUNCTION_CODE (fndecl))
8125 tree result;
8127 CASE_FLT_FN (BUILT_IN_ISINF):
8129 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8130 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8131 tree type = TREE_TYPE (arg);
8132 REAL_VALUE_TYPE r;
8133 char buf[128];
8135 if (is_ibm_extended)
8137 /* NaN and Inf are encoded in the high-order double value
8138 only. The low-order value is not significant. */
8139 type = double_type_node;
8140 mode = DFmode;
8141 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8143 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8144 real_from_string (&r, buf);
8145 result = build_call_expr (isgr_fn, 2,
8146 fold_build1_loc (loc, ABS_EXPR, type, arg),
8147 build_real (type, r));
8148 return result;
8150 CASE_FLT_FN (BUILT_IN_FINITE):
8151 case BUILT_IN_ISFINITE:
8153 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8154 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8155 tree type = TREE_TYPE (arg);
8156 REAL_VALUE_TYPE r;
8157 char buf[128];
8159 if (is_ibm_extended)
8161 /* NaN and Inf are encoded in the high-order double value
8162 only. The low-order value is not significant. */
8163 type = double_type_node;
8164 mode = DFmode;
8165 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8167 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8168 real_from_string (&r, buf);
8169 result = build_call_expr (isle_fn, 2,
8170 fold_build1_loc (loc, ABS_EXPR, type, arg),
8171 build_real (type, r));
8172 /*result = fold_build2_loc (loc, UNGT_EXPR,
8173 TREE_TYPE (TREE_TYPE (fndecl)),
8174 fold_build1_loc (loc, ABS_EXPR, type, arg),
8175 build_real (type, r));
8176 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8177 TREE_TYPE (TREE_TYPE (fndecl)),
8178 result);*/
8179 return result;
8181 case BUILT_IN_ISNORMAL:
8183 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8184 islessequal(fabs(x),DBL_MAX). */
8185 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8186 tree type = TREE_TYPE (arg);
8187 tree orig_arg, max_exp, min_exp;
8188 machine_mode orig_mode = mode;
8189 REAL_VALUE_TYPE rmax, rmin;
8190 char buf[128];
8192 orig_arg = arg = builtin_save_expr (arg);
8193 if (is_ibm_extended)
8195 /* Use double to test the normal range of IBM extended
8196 precision. Emin for IBM extended precision is
8197 different to emin for IEEE double, being 53 higher
8198 since the low double exponent is at least 53 lower
8199 than the high double exponent. */
8200 type = double_type_node;
8201 mode = DFmode;
8202 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8204 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8206 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8207 real_from_string (&rmax, buf);
8208 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8209 real_from_string (&rmin, buf);
8210 max_exp = build_real (type, rmax);
8211 min_exp = build_real (type, rmin);
8213 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8214 if (is_ibm_extended)
8216 /* Testing the high end of the range is done just using
8217 the high double, using the same test as isfinite().
8218 For the subnormal end of the range we first test the
8219 high double, then if its magnitude is equal to the
8220 limit of 0x1p-969, we test whether the low double is
8221 non-zero and opposite sign to the high double. */
8222 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8223 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8224 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8225 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8226 arg, min_exp);
8227 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8228 complex_double_type_node, orig_arg);
8229 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8230 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8231 tree zero = build_real (type, dconst0);
8232 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8233 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8234 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8235 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8236 fold_build3 (COND_EXPR,
8237 integer_type_node,
8238 hilt, logt, lolt));
8239 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8240 eq_min, ok_lo);
8241 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8242 gt_min, eq_min);
8244 else
8246 tree const isge_fn
8247 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8248 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8250 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8251 max_exp, min_exp);
8252 return result;
8254 default:
8255 break;
8258 return NULL_TREE;
8261 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8262 ARG is the argument for the call. */
8264 static tree
8265 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8267 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8269 if (!validate_arg (arg, REAL_TYPE))
8270 return NULL_TREE;
8272 switch (builtin_index)
8274 case BUILT_IN_ISINF:
8275 if (!HONOR_INFINITIES (arg))
8276 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8278 return NULL_TREE;
8280 case BUILT_IN_ISINF_SIGN:
8282 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8283 /* In a boolean context, GCC will fold the inner COND_EXPR to
8284 1. So e.g. "if (isinf_sign(x))" would be folded to just
8285 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8286 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8287 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8288 tree tmp = NULL_TREE;
8290 arg = builtin_save_expr (arg);
8292 if (signbit_fn && isinf_fn)
8294 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8295 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8297 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8298 signbit_call, integer_zero_node);
8299 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8300 isinf_call, integer_zero_node);
8302 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8303 integer_minus_one_node, integer_one_node);
8304 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8305 isinf_call, tmp,
8306 integer_zero_node);
8309 return tmp;
8312 case BUILT_IN_ISFINITE:
8313 if (!HONOR_NANS (arg)
8314 && !HONOR_INFINITIES (arg))
8315 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8317 return NULL_TREE;
8319 case BUILT_IN_ISNAN:
8320 if (!HONOR_NANS (arg))
8321 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8324 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8325 if (is_ibm_extended)
8327 /* NaN and Inf are encoded in the high-order double value
8328 only. The low-order value is not significant. */
8329 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8332 arg = builtin_save_expr (arg);
8333 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8335 default:
8336 gcc_unreachable ();
8340 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8341 This builtin will generate code to return the appropriate floating
8342 point classification depending on the value of the floating point
8343 number passed in. The possible return values must be supplied as
8344 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8345 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
8346 one floating point argument, which is "type generic". */
8348 static tree
8349 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8351 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8352 arg, type, res, tmp;
8353 machine_mode mode;
8354 REAL_VALUE_TYPE r;
8355 char buf[128];
8357 /* Verify the required arguments in the original call. */
8358 if (nargs != 6
8359 || !validate_arg (args[0], INTEGER_TYPE)
8360 || !validate_arg (args[1], INTEGER_TYPE)
8361 || !validate_arg (args[2], INTEGER_TYPE)
8362 || !validate_arg (args[3], INTEGER_TYPE)
8363 || !validate_arg (args[4], INTEGER_TYPE)
8364 || !validate_arg (args[5], REAL_TYPE))
8365 return NULL_TREE;
8367 fp_nan = args[0];
8368 fp_infinite = args[1];
8369 fp_normal = args[2];
8370 fp_subnormal = args[3];
8371 fp_zero = args[4];
8372 arg = args[5];
8373 type = TREE_TYPE (arg);
8374 mode = TYPE_MODE (type);
8375 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8377 /* fpclassify(x) ->
8378 isnan(x) ? FP_NAN :
8379 (fabs(x) == Inf ? FP_INFINITE :
8380 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8381 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8383 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8384 build_real (type, dconst0));
8385 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8386 tmp, fp_zero, fp_subnormal);
8388 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8389 real_from_string (&r, buf);
8390 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8391 arg, build_real (type, r));
8392 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8394 if (HONOR_INFINITIES (mode))
8396 real_inf (&r);
8397 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8398 build_real (type, r));
8399 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8400 fp_infinite, res);
8403 if (HONOR_NANS (mode))
8405 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8406 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8409 return res;
8412 /* Fold a call to an unordered comparison function such as
8413 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8414 being called and ARG0 and ARG1 are the arguments for the call.
8415 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8416 the opposite of the desired result. UNORDERED_CODE is used
8417 for modes that can hold NaNs and ORDERED_CODE is used for
8418 the rest. */
8420 static tree
8421 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8422 enum tree_code unordered_code,
8423 enum tree_code ordered_code)
8425 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8426 enum tree_code code;
8427 tree type0, type1;
8428 enum tree_code code0, code1;
8429 tree cmp_type = NULL_TREE;
8431 type0 = TREE_TYPE (arg0);
8432 type1 = TREE_TYPE (arg1);
8434 code0 = TREE_CODE (type0);
8435 code1 = TREE_CODE (type1);
8437 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8438 /* Choose the wider of two real types. */
8439 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8440 ? type0 : type1;
8441 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8442 cmp_type = type0;
8443 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8444 cmp_type = type1;
8446 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8447 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8449 if (unordered_code == UNORDERED_EXPR)
8451 if (!HONOR_NANS (arg0))
8452 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8453 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8456 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8457 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8458 fold_build2_loc (loc, code, type, arg0, arg1));
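/* For example, in a NaN-honoring mode the fold above turns

     isgreater (x, y)  ->  !(UNLE_EXPR (x, y))

   i.e. the negation of "x <= y or unordered", which is true exactly
   when x > y and, unlike a plain x > y, raises no "invalid"
   exception on quiet NaN operands.  */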
8461 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8462 arithmetic if it can never overflow, or into internal functions that
8463 return both the result of the arithmetic and an overflow flag in
8464 a complex integer result, or some other check for overflow.
8465 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8466 checking part of that. */
8468 static tree
8469 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8470 tree arg0, tree arg1, tree arg2)
8472 enum internal_fn ifn = IFN_LAST;
8473 /* The code of the expression corresponding to the type-generic
8474 built-in, or ERROR_MARK for the type-specific ones. */
8475 enum tree_code opcode = ERROR_MARK;
8476 bool ovf_only = false;
8478 switch (fcode)
8480 case BUILT_IN_ADD_OVERFLOW_P:
8481 ovf_only = true;
8482 /* FALLTHRU */
8483 case BUILT_IN_ADD_OVERFLOW:
8484 opcode = PLUS_EXPR;
8485 /* FALLTHRU */
8486 case BUILT_IN_SADD_OVERFLOW:
8487 case BUILT_IN_SADDL_OVERFLOW:
8488 case BUILT_IN_SADDLL_OVERFLOW:
8489 case BUILT_IN_UADD_OVERFLOW:
8490 case BUILT_IN_UADDL_OVERFLOW:
8491 case BUILT_IN_UADDLL_OVERFLOW:
8492 ifn = IFN_ADD_OVERFLOW;
8493 break;
8494 case BUILT_IN_SUB_OVERFLOW_P:
8495 ovf_only = true;
8496 /* FALLTHRU */
8497 case BUILT_IN_SUB_OVERFLOW:
8498 opcode = MINUS_EXPR;
8499 /* FALLTHRU */
8500 case BUILT_IN_SSUB_OVERFLOW:
8501 case BUILT_IN_SSUBL_OVERFLOW:
8502 case BUILT_IN_SSUBLL_OVERFLOW:
8503 case BUILT_IN_USUB_OVERFLOW:
8504 case BUILT_IN_USUBL_OVERFLOW:
8505 case BUILT_IN_USUBLL_OVERFLOW:
8506 ifn = IFN_SUB_OVERFLOW;
8507 break;
8508 case BUILT_IN_MUL_OVERFLOW_P:
8509 ovf_only = true;
8510 /* FALLTHRU */
8511 case BUILT_IN_MUL_OVERFLOW:
8512 opcode = MULT_EXPR;
8513 /* FALLTHRU */
8514 case BUILT_IN_SMUL_OVERFLOW:
8515 case BUILT_IN_SMULL_OVERFLOW:
8516 case BUILT_IN_SMULLL_OVERFLOW:
8517 case BUILT_IN_UMUL_OVERFLOW:
8518 case BUILT_IN_UMULL_OVERFLOW:
8519 case BUILT_IN_UMULLL_OVERFLOW:
8520 ifn = IFN_MUL_OVERFLOW;
8521 break;
8522 default:
8523 gcc_unreachable ();
8526 /* For the "generic" overloads, the first two arguments can have different
8527 types and the last argument determines the target type to use to check
8528 for overflow. The arguments of the other overloads all have the same
8529 type. */
8530 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8532 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8533 arguments are constant, attempt to fold the built-in call into a constant
8534 expression indicating whether or not it detected an overflow. */
8535 if (ovf_only
8536 && TREE_CODE (arg0) == INTEGER_CST
8537 && TREE_CODE (arg1) == INTEGER_CST)
8538 /* Perform the computation in the target type and check for overflow. */
8539 return omit_one_operand_loc (loc, boolean_type_node,
8540 arith_overflowed_p (opcode, type, arg0, arg1)
8541 ? boolean_true_node : boolean_false_node,
8542 arg2);
8544 tree ctype = build_complex_type (type);
8545 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8546 2, arg0, arg1);
8547 tree tgt = save_expr (call);
8548 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8549 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8550 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8552 if (ovf_only)
8553 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8555 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8556 tree store
8557 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8558 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
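/* Two illustrative outcomes (values chosen for the example):

     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)
       -> boolean_true_node, folded via arith_overflowed_p since both
          operands are INTEGER_CSTs;

     __builtin_mul_overflow (a, b, &res)
       -> tmp = IFN_MUL_OVERFLOW (a, b);
          res = REALPART_EXPR <tmp>;
          return value = IMAGPART_EXPR <tmp> converted to bool.  */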
8561 /* Fold a call to __builtin_FILE to a constant string. */
8563 static inline tree
8564 fold_builtin_FILE (location_t loc)
8566 if (const char *fname = LOCATION_FILE (loc))
8567 return build_string_literal (strlen (fname) + 1, fname);
8569 return build_string_literal (1, "");
8572 /* Fold a call to __builtin_FUNCTION to a constant string. */
8574 static inline tree
8575 fold_builtin_FUNCTION ()
8577 if (current_function_decl)
8579 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8580 return build_string_literal (strlen (name) + 1, name);
8583 return build_string_literal (1, "");
8586 /* Fold a call to __builtin_LINE to an integer constant. */
8588 static inline tree
8589 fold_builtin_LINE (location_t loc, tree type)
8591 return build_int_cst (type, LOCATION_LINE (loc));
8594 /* Fold a call to built-in function FNDECL with 0 arguments.
8595 This function returns NULL_TREE if no simplification was possible. */
8597 static tree
8598 fold_builtin_0 (location_t loc, tree fndecl)
8600 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8601 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8602 switch (fcode)
8604 case BUILT_IN_FILE:
8605 return fold_builtin_FILE (loc);
8607 case BUILT_IN_FUNCTION:
8608 return fold_builtin_FUNCTION ();
8610 case BUILT_IN_LINE:
8611 return fold_builtin_LINE (loc, type);
8613 CASE_FLT_FN (BUILT_IN_INF):
8614 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8615 case BUILT_IN_INFD32:
8616 case BUILT_IN_INFD64:
8617 case BUILT_IN_INFD128:
8618 return fold_builtin_inf (loc, type, true);
8620 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8621 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8622 return fold_builtin_inf (loc, type, false);
8624 case BUILT_IN_CLASSIFY_TYPE:
8625 return fold_builtin_classify_type (NULL_TREE);
8627 default:
8628 break;
8630 return NULL_TREE;
8633 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8634 This function returns NULL_TREE if no simplification was possible. */
8636 static tree
8637 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8639 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8640 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8642 if (TREE_CODE (arg0) == ERROR_MARK)
8643 return NULL_TREE;
8645 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8646 return ret;
8648 switch (fcode)
8650 case BUILT_IN_CONSTANT_P:
8652 tree val = fold_builtin_constant_p (arg0);
8654 /* Gimplification will pull the CALL_EXPR for the builtin out of
8655 an if condition. When not optimizing, we'll not CSE it back.
8656 To avoid regressions such as link errors, return zero now. */
8657 if (!val && !optimize)
8658 val = integer_zero_node;
8660 return val;
8663 case BUILT_IN_CLASSIFY_TYPE:
8664 return fold_builtin_classify_type (arg0);
8666 case BUILT_IN_STRLEN:
8667 return fold_builtin_strlen (loc, type, arg0);
8669 CASE_FLT_FN (BUILT_IN_FABS):
8670 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8671 case BUILT_IN_FABSD32:
8672 case BUILT_IN_FABSD64:
8673 case BUILT_IN_FABSD128:
8674 return fold_builtin_fabs (loc, arg0, type);
8676 case BUILT_IN_ABS:
8677 case BUILT_IN_LABS:
8678 case BUILT_IN_LLABS:
8679 case BUILT_IN_IMAXABS:
8680 return fold_builtin_abs (loc, arg0, type);
8682 CASE_FLT_FN (BUILT_IN_CONJ):
8683 if (validate_arg (arg0, COMPLEX_TYPE)
8684 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8685 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8686 break;
8688 CASE_FLT_FN (BUILT_IN_CREAL):
8689 if (validate_arg (arg0, COMPLEX_TYPE)
8690 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8691 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8692 break;
8694 CASE_FLT_FN (BUILT_IN_CIMAG):
8695 if (validate_arg (arg0, COMPLEX_TYPE)
8696 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8697 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8698 break;
8700 CASE_FLT_FN (BUILT_IN_CARG):
8701 return fold_builtin_carg (loc, arg0, type);
8703 case BUILT_IN_ISASCII:
8704 return fold_builtin_isascii (loc, arg0);
8706 case BUILT_IN_TOASCII:
8707 return fold_builtin_toascii (loc, arg0);
8709 case BUILT_IN_ISDIGIT:
8710 return fold_builtin_isdigit (loc, arg0);
8712 CASE_FLT_FN (BUILT_IN_FINITE):
8713 case BUILT_IN_FINITED32:
8714 case BUILT_IN_FINITED64:
8715 case BUILT_IN_FINITED128:
8716 case BUILT_IN_ISFINITE:
8718 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8719 if (ret)
8720 return ret;
8721 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8724 CASE_FLT_FN (BUILT_IN_ISINF):
8725 case BUILT_IN_ISINFD32:
8726 case BUILT_IN_ISINFD64:
8727 case BUILT_IN_ISINFD128:
8729 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8730 if (ret)
8731 return ret;
8732 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8735 case BUILT_IN_ISNORMAL:
8736 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8738 case BUILT_IN_ISINF_SIGN:
8739 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8741 CASE_FLT_FN (BUILT_IN_ISNAN):
8742 case BUILT_IN_ISNAND32:
8743 case BUILT_IN_ISNAND64:
8744 case BUILT_IN_ISNAND128:
8745 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8747 case BUILT_IN_FREE:
8748 if (integer_zerop (arg0))
8749 return build_empty_stmt (loc);
8750 break;
8752 default:
8753 break;
8756 return NULL_TREE;
8760 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8761 This function returns NULL_TREE if no simplification was possible. */
8763 static tree
8764 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8766 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8767 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8769 if (TREE_CODE (arg0) == ERROR_MARK
8770 || TREE_CODE (arg1) == ERROR_MARK)
8771 return NULL_TREE;
8773 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8774 return ret;
8776 switch (fcode)
8778 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8779 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8780 if (validate_arg (arg0, REAL_TYPE)
8781 && validate_arg (arg1, POINTER_TYPE))
8782 return do_mpfr_lgamma_r (arg0, arg1, type);
8783 break;
8785 CASE_FLT_FN (BUILT_IN_FREXP):
8786 return fold_builtin_frexp (loc, arg0, arg1, type);
8788 CASE_FLT_FN (BUILT_IN_MODF):
8789 return fold_builtin_modf (loc, arg0, arg1, type);
8791 case BUILT_IN_STRSPN:
8792 return fold_builtin_strspn (loc, arg0, arg1);
8794 case BUILT_IN_STRCSPN:
8795 return fold_builtin_strcspn (loc, arg0, arg1);
8797 case BUILT_IN_STRPBRK:
8798 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8800 case BUILT_IN_EXPECT:
8801 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8803 case BUILT_IN_ISGREATER:
8804 return fold_builtin_unordered_cmp (loc, fndecl,
8805 arg0, arg1, UNLE_EXPR, LE_EXPR);
8806 case BUILT_IN_ISGREATEREQUAL:
8807 return fold_builtin_unordered_cmp (loc, fndecl,
8808 arg0, arg1, UNLT_EXPR, LT_EXPR);
8809 case BUILT_IN_ISLESS:
8810 return fold_builtin_unordered_cmp (loc, fndecl,
8811 arg0, arg1, UNGE_EXPR, GE_EXPR);
8812 case BUILT_IN_ISLESSEQUAL:
8813 return fold_builtin_unordered_cmp (loc, fndecl,
8814 arg0, arg1, UNGT_EXPR, GT_EXPR);
8815 case BUILT_IN_ISLESSGREATER:
8816 return fold_builtin_unordered_cmp (loc, fndecl,
8817 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8818 case BUILT_IN_ISUNORDERED:
8819 return fold_builtin_unordered_cmp (loc, fndecl,
8820 arg0, arg1, UNORDERED_EXPR,
8821 NOP_EXPR);
8823 /* We do the folding for va_start in the expander. */
8824 case BUILT_IN_VA_START:
8825 break;
8827 case BUILT_IN_OBJECT_SIZE:
8828 return fold_builtin_object_size (arg0, arg1);
8830 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8831 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8833 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8834 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8836 default:
8837 break;
8839 return NULL_TREE;
8842 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8843 and ARG2.
8844 This function returns NULL_TREE if no simplification was possible. */
8846 static tree
8847 fold_builtin_3 (location_t loc, tree fndecl,
8848 tree arg0, tree arg1, tree arg2)
8850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8851 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8853 if (TREE_CODE (arg0) == ERROR_MARK
8854 || TREE_CODE (arg1) == ERROR_MARK
8855 || TREE_CODE (arg2) == ERROR_MARK)
8856 return NULL_TREE;
8858 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8859 arg0, arg1, arg2))
8860 return ret;
8862 switch (fcode)
8865 CASE_FLT_FN (BUILT_IN_SINCOS):
8866 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8868 CASE_FLT_FN (BUILT_IN_FMA):
8869 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8871 CASE_FLT_FN (BUILT_IN_REMQUO):
8872 if (validate_arg (arg0, REAL_TYPE)
8873 && validate_arg (arg1, REAL_TYPE)
8874 && validate_arg (arg2, POINTER_TYPE))
8875 return do_mpfr_remquo (arg0, arg1, arg2);
8876 break;
8878 case BUILT_IN_BCMP:
8879 case BUILT_IN_MEMCMP:
8880 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8882 case BUILT_IN_EXPECT:
8883 return fold_builtin_expect (loc, arg0, arg1, arg2);
8885 case BUILT_IN_ADD_OVERFLOW:
8886 case BUILT_IN_SUB_OVERFLOW:
8887 case BUILT_IN_MUL_OVERFLOW:
8888 case BUILT_IN_ADD_OVERFLOW_P:
8889 case BUILT_IN_SUB_OVERFLOW_P:
8890 case BUILT_IN_MUL_OVERFLOW_P:
8891 case BUILT_IN_SADD_OVERFLOW:
8892 case BUILT_IN_SADDL_OVERFLOW:
8893 case BUILT_IN_SADDLL_OVERFLOW:
8894 case BUILT_IN_SSUB_OVERFLOW:
8895 case BUILT_IN_SSUBL_OVERFLOW:
8896 case BUILT_IN_SSUBLL_OVERFLOW:
8897 case BUILT_IN_SMUL_OVERFLOW:
8898 case BUILT_IN_SMULL_OVERFLOW:
8899 case BUILT_IN_SMULLL_OVERFLOW:
8900 case BUILT_IN_UADD_OVERFLOW:
8901 case BUILT_IN_UADDL_OVERFLOW:
8902 case BUILT_IN_UADDLL_OVERFLOW:
8903 case BUILT_IN_USUB_OVERFLOW:
8904 case BUILT_IN_USUBL_OVERFLOW:
8905 case BUILT_IN_USUBLL_OVERFLOW:
8906 case BUILT_IN_UMUL_OVERFLOW:
8907 case BUILT_IN_UMULL_OVERFLOW:
8908 case BUILT_IN_UMULLL_OVERFLOW:
8909 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8911 default:
8912 break;
8914 return NULL_TREE;
8917 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8918 arguments. IGNORE is true if the result of the
8919 function call is ignored. This function returns NULL_TREE if no
8920 simplification was possible. */
8922 tree
8923 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8925 tree ret = NULL_TREE;
8927 switch (nargs)
8929 case 0:
8930 ret = fold_builtin_0 (loc, fndecl);
8931 break;
8932 case 1:
8933 ret = fold_builtin_1 (loc, fndecl, args[0]);
8934 break;
8935 case 2:
8936 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8937 break;
8938 case 3:
8939 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8940 break;
8941 default:
8942 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8943 break;
8945 if (ret)
8947 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8948 SET_EXPR_LOCATION (ret, loc);
8949 TREE_NO_WARNING (ret) = 1;
8950 return ret;
8952 return NULL_TREE;
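/* The NOP_EXPR wrapper above matters for calls whose value is ignored:
   a bare statement such as "strlen ("abc");" folds to the constant 3,
   which would otherwise draw a "statement with no effect" warning;
   wrapping the folded value and setting TREE_NO_WARNING suppresses
   that artificial diagnostic.  */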
8955 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8956 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8957 of arguments in ARGS to be omitted. OLDNARGS is the number of
8958 elements in ARGS. */
8960 static tree
8961 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8962 int skip, tree fndecl, int n, va_list newargs)
8964 int nargs = oldnargs - skip + n;
8965 tree *buffer;
8967 if (n > 0)
8969 int i, j;
8971 buffer = XALLOCAVEC (tree, nargs);
8972 for (i = 0; i < n; i++)
8973 buffer[i] = va_arg (newargs, tree);
8974 for (j = skip; j < oldnargs; j++, i++)
8975 buffer[i] = args[j];
8977 else
8978 buffer = args + skip;
8980 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
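/* The layout built above, for hypothetical values OLDNARGS == 3,
   SKIP == 1, N == 2:

     buffer = { new0, new1, args[1], args[2] }

   i.e. the N fresh arguments followed by the old arguments past SKIP,
   for NARGS == 3 - 1 + 2 == 4 in total.  */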
8983 /* Return true if FNDECL shouldn't be folded right now.
8984 If a built-in function has an inline attribute always_inline
8985 wrapper, defer folding it after always_inline functions have
8986 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8987 might not be performed. */
8989 bool
8990 avoid_folding_inline_builtin (tree fndecl)
8992 return (DECL_DECLARED_INLINE_P (fndecl)
8993 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8994 && cfun
8995 && !cfun->always_inline_functions_inlined
8996 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
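/* The kind of wrapper this guards, sketched after the glibc
   _FORTIFY_SOURCE headers (declaration shown for illustration only):

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding a strcpy call before this wrapper has been inlined would
   bypass the object-size check entirely.  */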
8999 /* A wrapper function for builtin folding that prevents warnings for
9000 "statement without effect" and the like, caused by removing the
9001 call node earlier than the warning is generated. */
9003 tree
9004 fold_call_expr (location_t loc, tree exp, bool ignore)
9006 tree ret = NULL_TREE;
9007 tree fndecl = get_callee_fndecl (exp);
9008 if (fndecl
9009 && TREE_CODE (fndecl) == FUNCTION_DECL
9010 && DECL_BUILT_IN (fndecl)
9011 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9012 yet. Defer folding until we see all the arguments
9013 (after inlining). */
9014 && !CALL_EXPR_VA_ARG_PACK (exp))
9016 int nargs = call_expr_nargs (exp);
9018 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9019 instead last argument is __builtin_va_arg_pack (). Defer folding
9020 even in that case, until arguments are finalized. */
9021 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9023 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9024 if (fndecl2
9025 && TREE_CODE (fndecl2) == FUNCTION_DECL
9026 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9027 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9028 return NULL_TREE;
9031 if (avoid_folding_inline_builtin (fndecl))
9032 return NULL_TREE;
9034 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9035 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9036 CALL_EXPR_ARGP (exp), ignore);
9037 else
9039 tree *args = CALL_EXPR_ARGP (exp);
9040 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9041 if (ret)
9042 return ret;
9045 return NULL_TREE;
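/* The va_arg_pack deferral above matters for wrappers such as this
   (illustrative):

     extern __inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf (const char *fmt, ...)
     {
       return __builtin___printf_chk (1, fmt, __builtin_va_arg_pack ());
     }

   Until inlining substitutes the caller's actual arguments for
   __builtin_va_arg_pack (), the argument list is not final and any
   folding based on it would be premature.  */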
9048 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9049 N arguments are passed in the array ARGARRAY. Return a folded
9050 expression or NULL_TREE if no simplification was possible. */
9052 tree
9053 fold_builtin_call_array (location_t loc, tree,
9054 tree fn,
9055 int n,
9056 tree *argarray)
9058 if (TREE_CODE (fn) != ADDR_EXPR)
9059 return NULL_TREE;
9061 tree fndecl = TREE_OPERAND (fn, 0);
9062 if (TREE_CODE (fndecl) == FUNCTION_DECL
9063 && DECL_BUILT_IN (fndecl))
9065 /* If last argument is __builtin_va_arg_pack (), arguments to this
9066 function are not finalized yet. Defer folding until they are. */
9067 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9069 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9070 if (fndecl2
9071 && TREE_CODE (fndecl2) == FUNCTION_DECL
9072 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9073 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9074 return NULL_TREE;
9076 if (avoid_folding_inline_builtin (fndecl))
9077 return NULL_TREE;
9078 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9079 return targetm.fold_builtin (fndecl, n, argarray, false);
9080 else
9081 return fold_builtin_n (loc, fndecl, argarray, n, false);
9084 return NULL_TREE;
9087 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9088 along with N new arguments specified as the "..." parameters. SKIP
9089 is the number of arguments in EXP to be omitted. This function is used
9090 to do varargs-to-varargs transformations. */
9092 static tree
9093 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9095 va_list ap;
9096 tree t;
9098 va_start (ap, n);
9099 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9100 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9101 va_end (ap);
9103 return t;
9106 /* Validate a single argument ARG against a tree code CODE representing
9107 a type. Return true when the argument is valid. */
9109 static bool
9110 validate_arg (const_tree arg, enum tree_code code)
9112 if (!arg)
9113 return false;
9114 else if (code == POINTER_TYPE)
9115 return POINTER_TYPE_P (TREE_TYPE (arg));
9116 else if (code == INTEGER_TYPE)
9117 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9118 return code == TREE_CODE (TREE_TYPE (arg));
9121 /* This function validates the types of a function call argument list
9122 against a specified list of tree_codes. If the last specifier is a 0,
9123 that represents an ellipsis; otherwise the last specifier must be a
9124 VOID_TYPE.
9126 This is the GIMPLE version of validate_arglist. Eventually we want to
9127 completely convert builtins.c to work from GIMPLEs and the tree based
9128 validate_arglist will then be removed. */
9130 bool
9131 validate_gimple_arglist (const gcall *call, ...)
9133 enum tree_code code;
9134 bool res = false;
9135 va_list ap;
9136 const_tree arg;
9137 size_t i;
9139 va_start (ap, call);
9140 i = 0;
9142 do
9144 code = (enum tree_code) va_arg (ap, int);
9145 switch (code)
9147 case 0:
9148 /* This signifies an ellipsis; any further arguments are all ok. */
9149 res = true;
9150 goto end;
9151 case VOID_TYPE:
9152 /* This signifies an endlink: if no arguments remain, return
9153 true, otherwise return false. */
9154 res = (i == gimple_call_num_args (call));
9155 goto end;
9156 default:
9157 /* If no parameters remain or the parameter's code does not
9158 match the specified code, return false. Otherwise continue
9159 checking any remaining arguments. */
9160 arg = gimple_call_arg (call, i++);
9161 if (!validate_arg (arg, code))
9162 goto end;
9163 break;
9166 while (1);
9168 /* We need gotos here since we can only have one va_end in a
9169 function. */
9170 end: ;
9171 va_end (ap);
9173 return res;
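/* Two hypothetical calls showing the terminators described above:

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   accepts exactly one pointer argument followed by one integral
   argument, while

     validate_gimple_arglist (call, REAL_TYPE, 0)

   accepts one floating-point argument followed by any number of
   further arguments of any type.  */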
9176 /* Default target-specific builtin expander that does nothing. */
9178 rtx
9179 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9180 rtx target ATTRIBUTE_UNUSED,
9181 rtx subtarget ATTRIBUTE_UNUSED,
9182 machine_mode mode ATTRIBUTE_UNUSED,
9183 int ignore ATTRIBUTE_UNUSED)
9185 return NULL_RTX;
9188 /* Returns true if EXP represents data that would potentially reside
9189 in a readonly section. */
9191 bool
9192 readonly_data_expr (tree exp)
9194 STRIP_NOPS (exp);
9196 if (TREE_CODE (exp) != ADDR_EXPR)
9197 return false;
9199 exp = get_base_address (TREE_OPERAND (exp, 0));
9200 if (!exp)
9201 return false;
9203 /* Make sure we call decl_readonly_section only for trees it
9204 can handle (since it returns true for everything it doesn't
9205 understand). */
9206 if (TREE_CODE (exp) == STRING_CST
9207 || TREE_CODE (exp) == CONSTRUCTOR
9208 || (VAR_P (exp) && TREE_STATIC (exp)))
9209 return decl_readonly_section (exp, 0);
9210 else
9211 return false;
9214 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9215 to the call, and TYPE is its return type.
9217 Return NULL_TREE if no simplification was possible, otherwise return the
9218 simplified form of the call as a tree.
9220 The simplified form may be a constant or other expression which
9221 computes the same value, but in a more efficient manner (including
9222 calls to other builtin functions).
9224 The call may contain arguments which need to be evaluated, but
9225 which are not useful to determine the result of the call. In
9226 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9227 COMPOUND_EXPR will be an argument which must be evaluated.
9228 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9229 COMPOUND_EXPR in the chain will contain the tree for the simplified
9230 form of the builtin function call. */
9232 static tree
9233 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9235 if (!validate_arg (s1, POINTER_TYPE)
9236 || !validate_arg (s2, POINTER_TYPE))
9237 return NULL_TREE;
9238 else
9240 tree fn;
9241 const char *p1, *p2;
9243 p2 = c_getstr (s2);
9244 if (p2 == NULL)
9245 return NULL_TREE;
9247 p1 = c_getstr (s1);
9248 if (p1 != NULL)
9250 const char *r = strpbrk (p1, p2);
9251 tree tem;
9253 if (r == NULL)
9254 return build_int_cst (TREE_TYPE (s1), 0);
9256 /* Return an offset into the constant string argument. */
9257 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9258 return fold_convert_loc (loc, type, tem);
9261 if (p2[0] == '\0')
9262 /* strpbrk(x, "") == NULL.
9263 Evaluate and ignore s1 in case it had side-effects. */
9264 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9266 if (p2[1] != '\0')
9267 return NULL_TREE; /* Really call strpbrk. */
9269 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9270 if (!fn)
9271 return NULL_TREE;
9273 /* New argument list transforming strpbrk(s1, s2) to
9274 strchr(s1, s2[0]). */
9275 return build_call_expr_loc (loc, fn, 2, s1,
9276 build_int_cst (integer_type_node, p2[0]));
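/* The transformations above, by example:

     strpbrk (s, "")      ->  evaluate s, yield (char *) 0
     strpbrk (s, "/")     ->  strchr (s, '/')
     strpbrk ("ab", "b")  ->  "ab" + 1

   the last assuming both arguments are string constants, so the host
   strpbrk result can be turned into an offset into S1.  */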
9280 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9281 to the call.
9283 Return NULL_TREE if no simplification was possible, otherwise return the
9284 simplified form of the call as a tree.
9286 The simplified form may be a constant or other expression which
9287 computes the same value, but in a more efficient manner (including
9288 calls to other builtin functions).
9290 The call may contain arguments which need to be evaluated, but
9291 which are not useful to determine the result of the call. In
9292 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9293 COMPOUND_EXPR will be an argument which must be evaluated.
9294 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9295 COMPOUND_EXPR in the chain will contain the tree for the simplified
9296 form of the builtin function call. */
9298 static tree
9299 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9301 if (!validate_arg (s1, POINTER_TYPE)
9302 || !validate_arg (s2, POINTER_TYPE))
9303 return NULL_TREE;
9304 else
9306 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9308 /* If either argument is "", the result is zero. */
9309 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9310 /* Evaluate and ignore both arguments in case either one has
9311 side-effects. */
9312 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9313 s1, s2);
9314 return NULL_TREE;
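/* By example: strspn (s, "") and strspn ("", s) both fold to
   (size_t) 0 here, with the other argument still evaluated for any
   side effects; every other case is left to the library call.  */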
9318 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9319 to the call.
9321 Return NULL_TREE if no simplification was possible, otherwise return the
9322 simplified form of the call as a tree.
9324 The simplified form may be a constant or other expression which
9325 computes the same value, but in a more efficient manner (including
9326 calls to other builtin functions).
9328 The call may contain arguments which need to be evaluated, but
9329 which are not useful to determine the result of the call. In
9330 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9331 COMPOUND_EXPR will be an argument which must be evaluated.
9332 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9333 COMPOUND_EXPR in the chain will contain the tree for the simplified
9334 form of the builtin function call. */
9336 static tree
9337 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9339 if (!validate_arg (s1, POINTER_TYPE)
9340 || !validate_arg (s2, POINTER_TYPE))
9341 return NULL_TREE;
9342 else
9345 /* If the first argument is "", the result is zero. */
9345 const char *p1 = c_getstr (s1);
9346 if (p1 && *p1 == '\0')
9348 /* Evaluate and ignore argument s2 in case it has
9349 side-effects. */
9350 return omit_one_operand_loc (loc, size_type_node,
9351 size_zero_node, s2);
9354 /* If the second argument is "", return __builtin_strlen(s1). */
9355 const char *p2 = c_getstr (s2);
9356 if (p2 && *p2 == '\0')
9358 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9360 /* If the replacement _DECL isn't initialized, don't do the
9361 transformation. */
9362 if (!fn)
9363 return NULL_TREE;
9365 return build_call_expr_loc (loc, fn, 1, s1);
9367 return NULL_TREE;
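/* By example: strcspn ("", s) folds to (size_t) 0 (still evaluating
   the second argument for side effects), and strcspn (s, "") folds to
   strlen (s), since with no reject characters the initial span is the
   whole string.  */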
9371 /* Fold the next_arg or va_start call EXP. Return true if an error was
9372 produced, false otherwise. This is done so that we don't output the
9373 error or warning two or three times. */
9375 bool
9376 fold_builtin_next_arg (tree exp, bool va_start_p)
9378 tree fntype = TREE_TYPE (current_function_decl);
9379 int nargs = call_expr_nargs (exp);
9380 tree arg;
9381 /* There is a good chance the current input_location points inside the
9382 definition of the va_start macro (perhaps on the token for
9383 builtin) in a system header, so warnings will not be emitted.
9384 Use the location in real source code. */
9385 source_location current_location =
9386 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9387 NULL);
9389 if (!stdarg_p (fntype))
9391 error ("%<va_start%> used in function with fixed args");
9392 return true;
9395 if (va_start_p)
9397 if (va_start_p && (nargs != 2))
9399 error ("wrong number of arguments to function %<va_start%>");
9400 return true;
9402 arg = CALL_EXPR_ARG (exp, 1);
9404 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9405 when we checked the arguments and if needed issued a warning. */
9406 else
9408 if (nargs == 0)
9410 /* Evidently an out of date version of <stdarg.h>; can't validate
9411 va_start's second argument, but can still work as intended. */
9412 warning_at (current_location,
9413 OPT_Wvarargs,
9414 "%<__builtin_next_arg%> called without an argument");
9415 return true;
9417 else if (nargs > 1)
9419 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9420 return true;
9422 arg = CALL_EXPR_ARG (exp, 0);
9425 if (TREE_CODE (arg) == SSA_NAME)
9426 arg = SSA_NAME_VAR (arg);
9428 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9429 or __builtin_next_arg (0) the first time we see it, after checking
9430 the arguments and if needed issuing a warning. */
9431 if (!integer_zerop (arg))
9433 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9435 /* Strip off all nops for the sake of the comparison. This
9436 is not quite the same as STRIP_NOPS. It does more.
9437 We must also strip off INDIRECT_EXPR for C++ reference
9438 parameters. */
9439 while (CONVERT_EXPR_P (arg)
9440 || TREE_CODE (arg) == INDIRECT_REF)
9441 arg = TREE_OPERAND (arg, 0);
9442 if (arg != last_parm)
9444 /* FIXME: Sometimes with the tree optimizers we can end up with
9445 something other than the last argument even though the user used
9446 the last argument. We just warn and set the arg to be the last
9447 argument so that we will not get wrong code because of
9448 it. */
9449 warning_at (current_location,
9450 OPT_Wvarargs,
9451 "second parameter of %<va_start%> not last named argument");
9454 /* Undefined by C99 7.15.1.4p4 (va_start):
9455 "If the parameter parmN is declared with the register storage
9456 class, with a function or array type, or with a type that is
9457 not compatible with the type that results after application of
9458 the default argument promotions, the behavior is undefined."
9460 else if (DECL_REGISTER (arg))
9462 warning_at (current_location,
9463 OPT_Wvarargs,
9464 "undefined behavior when second parameter of "
9465 "%<va_start%> is declared with %<register%> storage");
9468 /* We want to verify the second parameter just once before the tree
9469 optimizers are run and then avoid keeping it in the tree,
9470 as otherwise we could warn even for correct code like:
9471 void foo (int i, ...)
9472 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9473 if (va_start_p)
9474 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9475 else
9476 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9478 return false;
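/* The checks above, for a hypothetical definition:

     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   (warned: A is not the last named parameter)
       va_start (ap, b);   (accepted; the argument is then zeroed out)
       va_end (ap);
     }
*/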
9482 /* Expand a call EXP to __builtin_object_size. */
9484 static rtx
9485 expand_builtin_object_size (tree exp)
9487 tree ost;
9488 int object_size_type;
9489 tree fndecl = get_callee_fndecl (exp);
9491 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9493 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9494 exp, fndecl);
9495 expand_builtin_trap ();
9496 return const0_rtx;
9499 ost = CALL_EXPR_ARG (exp, 1);
9500 STRIP_NOPS (ost);
9502 if (TREE_CODE (ost) != INTEGER_CST
9503 || tree_int_cst_sgn (ost) < 0
9504 || compare_tree_int (ost, 3) > 0)
9506 error ("%Klast argument of %D is not integer constant between 0 and 3",
9507 exp, fndecl);
9508 expand_builtin_trap ();
9509 return const0_rtx;
9512 object_size_type = tree_to_shwi (ost);
9514 return object_size_type < 2 ? constm1_rtx : const0_rtx;
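/* When the size could not be computed, the fallback above yields the
   documented "unknown" answers, e.g.

     __builtin_object_size (p, 0)  ->  (size_t) -1   (maximum estimate)
     __builtin_object_size (p, 2)  ->  (size_t) 0    (minimum estimate)

   matching the convention in fold_builtin_object_size below.  */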
9517 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9518 FCODE is the BUILT_IN_* to use.
9519 Return NULL_RTX if we failed; the caller should emit a normal call,
9520 otherwise try to get the result in TARGET, if convenient (and in
9521 mode MODE if that's convenient). */
9523 static rtx
9524 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9525 enum built_in_function fcode)
9527 tree dest, src, len, size;
9529 if (!validate_arglist (exp,
9530 POINTER_TYPE,
9531 fcode == BUILT_IN_MEMSET_CHK
9532 ? INTEGER_TYPE : POINTER_TYPE,
9533 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9534 return NULL_RTX;
9536 dest = CALL_EXPR_ARG (exp, 0);
9537 src = CALL_EXPR_ARG (exp, 1);
9538 len = CALL_EXPR_ARG (exp, 2);
9539 size = CALL_EXPR_ARG (exp, 3);
9541 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9542 exp, len, /*maxlen=*/NULL_TREE,
9543 /*str=*/NULL_TREE, size);
9545 if (!tree_fits_uhwi_p (size))
9546 return NULL_RTX;
9548 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9550 /* Avoid transforming the checking call to an ordinary one when
9551 an overflow has been detected or when the call couldn't be
9552 validated because the size is not constant. */
9553 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9554 return NULL_RTX;
9556 tree fn = NULL_TREE;
9557 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9558 mem{cpy,pcpy,move,set} is available. */
9559 switch (fcode)
9561 case BUILT_IN_MEMCPY_CHK:
9562 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9563 break;
9564 case BUILT_IN_MEMPCPY_CHK:
9565 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9566 break;
9567 case BUILT_IN_MEMMOVE_CHK:
9568 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9569 break;
9570 case BUILT_IN_MEMSET_CHK:
9571 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9572 break;
9573 default:
9574 break;
9577 if (! fn)
9578 return NULL_RTX;
9580 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9581 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9582 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9583 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9585 else if (fcode == BUILT_IN_MEMSET_CHK)
9586 return NULL_RTX;
9587 else
9589 unsigned int dest_align = get_pointer_alignment (dest);
9591 /* If DEST is not a pointer type, call the normal function. */
9592 if (dest_align == 0)
9593 return NULL_RTX;
9595 /* If SRC and DEST are the same (and not volatile), do nothing. */
9596 if (operand_equal_p (src, dest, 0))
9598 tree expr;
9600 if (fcode != BUILT_IN_MEMPCPY_CHK)
9602 /* Evaluate and ignore LEN in case it has side-effects. */
9603 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9604 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9607 expr = fold_build_pointer_plus (dest, len);
9608 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9611 /* __memmove_chk special case. */
9612 if (fcode == BUILT_IN_MEMMOVE_CHK)
9614 unsigned int src_align = get_pointer_alignment (src);
9616 if (src_align == 0)
9617 return NULL_RTX;
9619 /* If src is categorized for a readonly section we can use
9620 normal __memcpy_chk. */
9621 if (readonly_data_expr (src))
9623 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9624 if (!fn)
9625 return NULL_RTX;
9626 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9627 dest, src, len, size);
9628 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9629 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9630 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9633 return NULL_RTX;
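/* The net effect, sketched: once the length is known to fit,

     __builtin___memcpy_chk (d, s, 8, 16)

   expands as a plain memcpy (d, s, 8); the same happens when the
   object size is unknown ((size_t) -1), since there is nothing left
   to check at run time.  */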
9637 /* Emit warning if a buffer overflow is detected at compile time. */
9639 static void
9640 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9642 /* The source string. */
9643 tree srcstr = NULL_TREE;
9644 /* The size of the destination object. */
9645 tree objsize = NULL_TREE;
9646 /* The string being appended to (as in __strcat_chk),
9647 or null if the call isn't a concatenation. */
9648 tree catstr = NULL_TREE;
9649 /* The maximum length of the source sequence in a bounded operation
9650 (such as __strncat_chk) or null if the operation isn't bounded
9651 (such as __strcat_chk). */
9652 tree maxlen = NULL_TREE;
9654 switch (fcode)
9656 case BUILT_IN_STRCPY_CHK:
9657 case BUILT_IN_STPCPY_CHK:
9658 srcstr = CALL_EXPR_ARG (exp, 1);
9659 objsize = CALL_EXPR_ARG (exp, 2);
9660 break;
9662 case BUILT_IN_STRCAT_CHK:
9663 /* For __strcat_chk the warning will be emitted only if overflowing
9664 by at least strlen (dest) + 1 bytes. */
9665 catstr = CALL_EXPR_ARG (exp, 0);
9666 srcstr = CALL_EXPR_ARG (exp, 1);
9667 objsize = CALL_EXPR_ARG (exp, 2);
9668 break;
9670 case BUILT_IN_STRNCAT_CHK:
9671 catstr = CALL_EXPR_ARG (exp, 0);
9672 srcstr = CALL_EXPR_ARG (exp, 1);
9673 maxlen = CALL_EXPR_ARG (exp, 2);
9674 objsize = CALL_EXPR_ARG (exp, 3);
9675 break;
9677 case BUILT_IN_STRNCPY_CHK:
9678 case BUILT_IN_STPNCPY_CHK:
9679 srcstr = CALL_EXPR_ARG (exp, 1);
9680 maxlen = CALL_EXPR_ARG (exp, 2);
9681 objsize = CALL_EXPR_ARG (exp, 3);
9682 break;
9684 case BUILT_IN_SNPRINTF_CHK:
9685 case BUILT_IN_VSNPRINTF_CHK:
9686 maxlen = CALL_EXPR_ARG (exp, 1);
9687 objsize = CALL_EXPR_ARG (exp, 3);
9688 break;
9689 default:
9690 gcc_unreachable ();
9693 if (catstr && maxlen)
9695 /* Check __strncat_chk. There is no way to determine the length
9696 of the string to which the source string is being appended so
9697 just warn when the length of the source string is not known. */
9698 if (!check_strncat_sizes (exp, objsize))
9699 return;
9702 check_sizes (OPT_Wstringop_overflow_, exp,
9703 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
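/* For instance, with a known source length and destination size,

     char buf[4];
     __builtin___strcpy_chk (buf, "abcdef", sizeof buf);

   lets check_sizes diagnose the overflow (7 bytes, counting the nul,
   into 4) at compile time under -Wstringop-overflow.  */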
9706 /* Emit warning if a buffer overflow is detected at compile time
9707 in __sprintf_chk/__vsprintf_chk calls. */
9709 static void
9710 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9712 tree size, len, fmt;
9713 const char *fmt_str;
9714 int nargs = call_expr_nargs (exp);
9716 /* Verify the required arguments in the original call. */
9718 if (nargs < 4)
9719 return;
9720 size = CALL_EXPR_ARG (exp, 2);
9721 fmt = CALL_EXPR_ARG (exp, 3);
9723 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9724 return;
9726 /* Check whether the format is a literal string constant. */
9727 fmt_str = c_getstr (fmt);
9728 if (fmt_str == NULL)
9729 return;
9731 if (!init_target_chars ())
9732 return;
9734 /* If the format doesn't contain % args or %%, we know its size. */
9735 if (strchr (fmt_str, target_percent) == 0)
9736 len = build_int_cstu (size_type_node, strlen (fmt_str));
9737 /* If the format is "%s" and first ... argument is a string literal,
9738 we know it too. */
9739 else if (fcode == BUILT_IN_SPRINTF_CHK
9740 && strcmp (fmt_str, target_percent_s) == 0)
9742 tree arg;
9744 if (nargs < 5)
9745 return;
9746 arg = CALL_EXPR_ARG (exp, 4);
9747 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9748 return;
9750 len = c_strlen (arg, 1);
9751 if (!len || ! tree_fits_uhwi_p (len))
9752 return;
9754 else
9755 return;
9757 /* Add one for the terminating nul. */
9758 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9759 check_sizes (OPT_Wstringop_overflow_,
9760 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
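/* The two format shapes handled above, by example with a 4-byte
   destination:

     __builtin___sprintf_chk (buf, 0, 4, "abcd")        len 4+1 > 4, warn
     __builtin___sprintf_chk (buf, 0, 4, "%s", "abcd")  len 4+1 > 4, warn

   A format containing any other % directive has no compile-time
   length and is left alone.  */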
9763 /* Emit warning if a free is called with address of a variable. */
9765 static void
9766 maybe_emit_free_warning (tree exp)
9768 tree arg = CALL_EXPR_ARG (exp, 0);
9770 STRIP_NOPS (arg);
9771 if (TREE_CODE (arg) != ADDR_EXPR)
9772 return;
9774 arg = get_base_address (TREE_OPERAND (arg, 0));
9775 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9776 return;
9778 if (SSA_VAR_P (arg))
9779 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9780 "%Kattempt to free a non-heap object %qD", exp, arg);
9781 else
9782 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9783 "%Kattempt to free a non-heap object", exp);
9786 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9787 if possible. */
9789 static tree
9790 fold_builtin_object_size (tree ptr, tree ost)
9792 unsigned HOST_WIDE_INT bytes;
9793 int object_size_type;
9795 if (!validate_arg (ptr, POINTER_TYPE)
9796 || !validate_arg (ost, INTEGER_TYPE))
9797 return NULL_TREE;
9799 STRIP_NOPS (ost);
9801 if (TREE_CODE (ost) != INTEGER_CST
9802 || tree_int_cst_sgn (ost) < 0
9803 || compare_tree_int (ost, 3) > 0)
9804 return NULL_TREE;
9806 object_size_type = tree_to_shwi (ost);
9808 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9809 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9810 and (size_t) 0 for types 2 and 3. */
9811 if (TREE_SIDE_EFFECTS (ptr))
9812 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9814 if (TREE_CODE (ptr) == ADDR_EXPR)
9816 compute_builtin_object_size (ptr, object_size_type, &bytes);
9817 if (wi::fits_to_tree_p (bytes, size_type_node))
9818 return build_int_cstu (size_type_node, bytes);
9820 else if (TREE_CODE (ptr) == SSA_NAME)
9822 /* If object size is not known yet, delay folding until
9823 later. Maybe subsequent passes will help determine
9824 it. */
9825 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9826 && wi::fits_to_tree_p (bytes, size_type_node))
9827 return build_int_cstu (size_type_node, bytes);
9830 return NULL_TREE;
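/* By example, assuming a local "char buf[8];":

     __builtin_object_size (buf, 0)         ->  8
     __builtin_object_size (buf + f (), 0)  ->  (size_t) -1

   the second because the argument has side effects and, per the
   comment above, types 0 and 1 then answer (size_t) -1.  */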
9833 /* Builtins with folding operations that operate on "..." arguments
9834 need special handling; we need to store the arguments in a convenient
9835 data structure before attempting any folding. Fortunately there are
9836 only a few builtins that fall into this category. FNDECL is the
9837 function, EXP is the CALL_EXPR for the call. */
9839 static tree
9840 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9842 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9843 tree ret = NULL_TREE;
9845 switch (fcode)
9847 case BUILT_IN_FPCLASSIFY:
9848 ret = fold_builtin_fpclassify (loc, args, nargs);
9849 break;
9851 default:
9852 break;
9854 if (ret)
9856 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9857 SET_EXPR_LOCATION (ret, loc);
9858 TREE_NO_WARNING (ret) = 1;
9859 return ret;
9861 return NULL_TREE;
9864 /* Initialize format string characters in the target charset. */
9866 bool
9867 init_target_chars (void)
9869 static bool init;
9870 if (!init)
9872 target_newline = lang_hooks.to_target_charset ('\n');
9873 target_percent = lang_hooks.to_target_charset ('%');
9874 target_c = lang_hooks.to_target_charset ('c');
9875 target_s = lang_hooks.to_target_charset ('s');
9876 if (target_newline == 0 || target_percent == 0 || target_c == 0
9877 || target_s == 0)
9878 return false;
9880 target_percent_c[0] = target_percent;
9881 target_percent_c[1] = target_c;
9882 target_percent_c[2] = '\0';
9884 target_percent_s[0] = target_percent;
9885 target_percent_s[1] = target_s;
9886 target_percent_s[2] = '\0';
9888 target_percent_s_newline[0] = target_percent;
9889 target_percent_s_newline[1] = target_s;
9890 target_percent_s_newline[2] = target_newline;
9891 target_percent_s_newline[3] = '\0';
9893 init = true;
9895 return true;
9898 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9899 and no overflow/underflow occurred. INEXACT is true if M was not
9900 exactly calculated. TYPE is the tree type for the result. This
9901 function assumes that you cleared the MPFR flags and then
9902 calculated M to see if anything subsequently set a flag prior to
9903 entering this function. Return NULL_TREE if any checks fail. */
9905 static tree
9906 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9908 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9909 overflow/underflow occurred. If -frounding-math, proceed iff the
9910 result of calling FUNC was exact. */
9911 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9912 && (!flag_rounding_math || !inexact))
9914 REAL_VALUE_TYPE rr;
9916 real_from_mpfr (&rr, m, type, GMP_RNDN);
9917 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9918 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9919 but the mpfr_t is not, then we underflowed in the
9920 conversion. */
9921 if (real_isfinite (&rr)
9922 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9924 REAL_VALUE_TYPE rmode;
9926 real_convert (&rmode, TYPE_MODE (type), &rr);
9927 /* Proceed iff the specified mode can hold the value. */
9928 if (real_identical (&rmode, &rr))
9929 return build_real (type, rmode);
9932 return NULL_TREE;
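/* This inexact check is what keeps constant folding honest under
   -frounding-math: a result MPFR reports as rounded (for instance an
   lgamma fold) is declined, since under a non-default run-time
   rounding mode the last bit could differ.  */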
9935 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9936 number and no overflow/underflow occurred. INEXACT is true if M
9937 was not exactly calculated. TYPE is the tree type for the result.
9938 This function assumes that you cleared the MPFR flags and then
9939 calculated M to see if anything subsequently set a flag prior to
9940 entering this function. Return NULL_TREE if any checks fail, if
9941 FORCE_CONVERT is true, then bypass the checks. */
9943 static tree
9944 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9946 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9947 overflow/underflow occurred. If -frounding-math, proceed iff the
9948 result of calling FUNC was exact. */
9949 if (force_convert
9950 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9951 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9952 && (!flag_rounding_math || !inexact)))
9954 REAL_VALUE_TYPE re, im;
9956 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9957 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9958 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9959 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9960 but the mpfr_t is not, then we underflowed in the
9961 conversion. */
9962 if (force_convert
9963 || (real_isfinite (&re) && real_isfinite (&im)
9964 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9965 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9967 REAL_VALUE_TYPE re_mode, im_mode;
9969 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9970 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9971 /* Proceed iff the specified mode can hold the value. */
9972 if (force_convert
9973 || (real_identical (&re_mode, &re)
9974 && real_identical (&im_mode, &im)))
9975 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9976 build_real (TREE_TYPE (type), im_mode));
9979 return NULL_TREE;
9982 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9983 the pointer *(ARG_QUO) and return the result. The type is taken
9984 from the type of ARG0 and is used for setting the precision of the
9985 calculation and results. */
9987 static tree
9988 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9990 tree const type = TREE_TYPE (arg0);
9991 tree result = NULL_TREE;
9993 STRIP_NOPS (arg0);
9994 STRIP_NOPS (arg1);
9996 /* To proceed, MPFR must exactly represent the target floating point
9997 format, which only happens when the target base equals two. */
9998 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9999 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10000 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10002 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10003 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10005 if (real_isfinite (ra0) && real_isfinite (ra1))
10007 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10008 const int prec = fmt->p;
10009 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10010 tree result_rem;
10011 long integer_quo;
10012 mpfr_t m0, m1;
10014 mpfr_inits2 (prec, m0, m1, NULL);
10015 mpfr_from_real (m0, ra0, GMP_RNDN);
10016 mpfr_from_real (m1, ra1, GMP_RNDN);
10017 mpfr_clear_flags ();
10018 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10019 /* Remquo is independent of the rounding mode, so pass
10020 inexact=0 to do_mpfr_ckconv(). */
10021 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10022 mpfr_clears (m0, m1, NULL);
10023 if (result_rem)
10025 /* MPFR calculates quo in the host's long so it may
10026 return more bits in quo than the target int can hold
10027 if sizeof(host long) > sizeof(target int). This can
10028 happen even for native compilers in LP64 mode. In
10029 these cases, reduce the quo value modulo the largest
10030 number that the target int can hold, leaving one
10031 bit for the sign. */
10032 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10033 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10035 /* Dereference the quo pointer argument. */
10036 arg_quo = build_fold_indirect_ref (arg_quo);
10037 /* Proceed iff a valid pointer type was passed in. */
10038 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10040 /* Set the value. */
10041 tree result_quo
10042 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10043 build_int_cst (TREE_TYPE (arg_quo),
10044 integer_quo));
10045 TREE_SIDE_EFFECTS (result_quo) = 1;
10046 /* Combine the quo assignment with the rem. */
10047 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10048 result_quo, result_rem));
10053 return result;
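/* A worked constant fold, assuming double arguments: for

     remquo (5.0, 3.0, &q)

   the quotient 5.0 / 3.0 rounds to the nearest integer n == 2, so the
   remainder is 5.0 - 2 * 3.0 == -1.0 and *q is set to 2; the call
   folds to the COMPOUND_EXPR (*q = 2, -1.0).  */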
10056 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10057 resulting value as a tree with type TYPE. The mpfr precision is
10058 set to the precision of TYPE. We assume that this mpfr function
10059 returns zero if the result could be calculated exactly within the
10060 requested precision. In addition, the integer pointer represented
10061 by ARG_SG will be dereferenced and set to the appropriate signgam
10062 (-1,1) value. */
10064 static tree
10065 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10067 tree result = NULL_TREE;
10069 STRIP_NOPS (arg);
10071 /* To proceed, MPFR must exactly represent the target floating point
10072 format, which only happens when the target base equals two. Also
10073 verify ARG is a constant and that ARG_SG is an int pointer. */
10074 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10075 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10076 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10077 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10079 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10081 /* In addition to NaN and Inf, the argument cannot be zero or a
10082 negative integer. */
10083 if (real_isfinite (ra)
10084 && ra->cl != rvc_zero
10085 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10087 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10088 const int prec = fmt->p;
10089 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10090 int inexact, sg;
10091 mpfr_t m;
10092 tree result_lg;
10094 mpfr_init2 (m, prec);
10095 mpfr_from_real (m, ra, GMP_RNDN);
10096 mpfr_clear_flags ();
10097 inexact = mpfr_lgamma (m, &sg, m, rnd);
10098 result_lg = do_mpfr_ckconv (m, type, inexact);
10099 mpfr_clear (m);
10100 if (result_lg)
10102 tree result_sg;
10104 /* Dereference the arg_sg pointer argument. */
10105 arg_sg = build_fold_indirect_ref (arg_sg);
10106 /* Assign the signgam value into *arg_sg. */
10107 result_sg = fold_build2 (MODIFY_EXPR,
10108 TREE_TYPE (arg_sg), arg_sg,
10109 build_int_cst (TREE_TYPE (arg_sg), sg));
10110 TREE_SIDE_EFFECTS (result_sg) = 1;
10111 /* Combine the signgam assignment with the lgamma result. */
10112 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10113 result_sg, result_lg));
10118 return result;
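/* A worked example: lgamma_r (3.0, &sg) computes log (gamma (3.0))
   == log (2.0), with gamma positive at 3.0, so the call folds to the
   COMPOUND_EXPR (*sg = 1, 0.6931...), the constant correctly rounded
   for TYPE.  */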
10121 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10122 mpc function FUNC on it and return the resulting value as a tree
10123 with type TYPE. The mpfr precision is set to the precision of
10124 TYPE. We assume that function FUNC returns zero if the result
10125 could be calculated exactly within the requested precision. If
10126 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10127 in the arguments and/or results. */
10129 tree
10130 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10131 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10133 tree result = NULL_TREE;
10135 STRIP_NOPS (arg0);
10136 STRIP_NOPS (arg1);
10138 /* To proceed, MPFR must exactly represent the target floating point
10139 format, which only happens when the target base equals two. */
10140 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10141 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10142 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10143 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10144 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10146 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10147 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10148 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10149 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10151 if (do_nonfinite
10152 || (real_isfinite (re0) && real_isfinite (im0)
10153 && real_isfinite (re1) && real_isfinite (im1)))
10155 const struct real_format *const fmt =
10156 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10157 const int prec = fmt->p;
10158 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10159 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10160 int inexact;
10161 mpc_t m0, m1;
10163 mpc_init2 (m0, prec);
10164 mpc_init2 (m1, prec);
10165 mpfr_from_real (mpc_realref (m0), re0, rnd);
10166 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10167 mpfr_from_real (mpc_realref (m1), re1, rnd);
10168 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10169 mpfr_clear_flags ();
10170 inexact = func (m0, m0, m1, crnd);
10171 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10172 mpc_clear (m0);
10173 mpc_clear (m1);
10177 return result;
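/* For instance, fold-const-call.c passes mpc_pow here to fold cpow:
   with both operands the complex constant 2.0 + 0.0i, MPC evaluates
   the power at TYPE's precision and do_mpc_ckconv yields the constant
   4.0 + 0.0i, provided the target format holds it exactly.  */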
10180 /* A wrapper function for builtin folding that prevents warnings for
10181 "statement without effect" and the like, caused by removing the
10182 call node earlier than the warning is generated. */
10184 tree
10185 fold_call_stmt (gcall *stmt, bool ignore)
10187 tree ret = NULL_TREE;
10188 tree fndecl = gimple_call_fndecl (stmt);
10189 location_t loc = gimple_location (stmt);
10190 if (fndecl
10191 && TREE_CODE (fndecl) == FUNCTION_DECL
10192 && DECL_BUILT_IN (fndecl)
10193 && !gimple_call_va_arg_pack_p (stmt))
10195 int nargs = gimple_call_num_args (stmt);
10196 tree *args = (nargs > 0
10197 ? gimple_call_arg_ptr (stmt, 0)
10198 : &error_mark_node);
10200 if (avoid_folding_inline_builtin (fndecl))
10201 return NULL_TREE;
10202 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10204 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10206 else
10208 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10209 if (ret)
10211 /* Propagate location information from original call to
10212 expansion of builtin. Otherwise things like
10213 maybe_emit_chk_warning, which operate on the expansion
10214 of a builtin, will use the wrong location information. */
10215 if (gimple_has_location (stmt))
10217 tree realret = ret;
10218 if (TREE_CODE (ret) == NOP_EXPR)
10219 realret = TREE_OPERAND (ret, 0);
10220 if (CAN_HAVE_LOCATION_P (realret)
10221 && !EXPR_HAS_LOCATION (realret))
10222 SET_EXPR_LOCATION (realret, loc);
10223 return realret;
10225 return ret;
10229 return NULL_TREE;
10232 /* Look up the function in builtin_decl that corresponds to DECL
10233 and set ASMSPEC as its user assembler name. DECL must be a
10234 function decl that declares a builtin. */
10236 void
10237 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10239 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10240 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10241 && asmspec != 0);
10243 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10244 set_user_assembler_name (builtin, asmspec);
10246 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10247 && INT_TYPE_SIZE < BITS_PER_WORD)
10249 set_user_assembler_libfunc ("ffs", asmspec);
10250 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10251 "ffs");
10255 /* Return true if DECL is a builtin that expands to a constant or similarly
10256 simple code. */
10257 bool
10258 is_simple_builtin (tree decl)
10260 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10261 switch (DECL_FUNCTION_CODE (decl))
10263 /* Builtins that expand to constants. */
10264 case BUILT_IN_CONSTANT_P:
10265 case BUILT_IN_EXPECT:
10266 case BUILT_IN_OBJECT_SIZE:
10267 case BUILT_IN_UNREACHABLE:
10268 /* Simple register moves or loads from stack. */
10269 case BUILT_IN_ASSUME_ALIGNED:
10270 case BUILT_IN_RETURN_ADDRESS:
10271 case BUILT_IN_EXTRACT_RETURN_ADDR:
10272 case BUILT_IN_FROB_RETURN_ADDR:
10273 case BUILT_IN_RETURN:
10274 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10275 case BUILT_IN_FRAME_ADDRESS:
10276 case BUILT_IN_VA_END:
10277 case BUILT_IN_STACK_SAVE:
10278 case BUILT_IN_STACK_RESTORE:
10279 /* Exception state returns or moves registers around. */
10280 case BUILT_IN_EH_FILTER:
10281 case BUILT_IN_EH_POINTER:
10282 case BUILT_IN_EH_COPY_VALUES:
10283 return true;
10285 default:
10286 return false;
10289 return false;
10292 /* Return true if DECL is a builtin that is not expensive, i.e. one
10293 that is most probably expanded inline into reasonably simple code.
10294 This is a superset of is_simple_builtin. */
10295 bool
10296 is_inexpensive_builtin (tree decl)
10298 if (!decl)
10299 return false;
10300 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10301 return true;
10302 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10303 switch (DECL_FUNCTION_CODE (decl))
10305 case BUILT_IN_ABS:
10306 case BUILT_IN_ALLOCA:
10307 case BUILT_IN_ALLOCA_WITH_ALIGN:
10308 case BUILT_IN_BSWAP16:
10309 case BUILT_IN_BSWAP32:
10310 case BUILT_IN_BSWAP64:
10311 case BUILT_IN_CLZ:
10312 case BUILT_IN_CLZIMAX:
10313 case BUILT_IN_CLZL:
10314 case BUILT_IN_CLZLL:
10315 case BUILT_IN_CTZ:
10316 case BUILT_IN_CTZIMAX:
10317 case BUILT_IN_CTZL:
10318 case BUILT_IN_CTZLL:
10319 case BUILT_IN_FFS:
10320 case BUILT_IN_FFSIMAX:
10321 case BUILT_IN_FFSL:
10322 case BUILT_IN_FFSLL:
10323 case BUILT_IN_IMAXABS:
10324 case BUILT_IN_FINITE:
10325 case BUILT_IN_FINITEF:
10326 case BUILT_IN_FINITEL:
10327 case BUILT_IN_FINITED32:
10328 case BUILT_IN_FINITED64:
10329 case BUILT_IN_FINITED128:
10330 case BUILT_IN_FPCLASSIFY:
10331 case BUILT_IN_ISFINITE:
10332 case BUILT_IN_ISINF_SIGN:
10333 case BUILT_IN_ISINF:
10334 case BUILT_IN_ISINFF:
10335 case BUILT_IN_ISINFL:
10336 case BUILT_IN_ISINFD32:
10337 case BUILT_IN_ISINFD64:
10338 case BUILT_IN_ISINFD128:
10339 case BUILT_IN_ISNAN:
10340 case BUILT_IN_ISNANF:
10341 case BUILT_IN_ISNANL:
10342 case BUILT_IN_ISNAND32:
10343 case BUILT_IN_ISNAND64:
10344 case BUILT_IN_ISNAND128:
10345 case BUILT_IN_ISNORMAL:
10346 case BUILT_IN_ISGREATER:
10347 case BUILT_IN_ISGREATEREQUAL:
10348 case BUILT_IN_ISLESS:
10349 case BUILT_IN_ISLESSEQUAL:
10350 case BUILT_IN_ISLESSGREATER:
10351 case BUILT_IN_ISUNORDERED:
10352 case BUILT_IN_VA_ARG_PACK:
10353 case BUILT_IN_VA_ARG_PACK_LEN:
10354 case BUILT_IN_VA_COPY:
10355 case BUILT_IN_TRAP:
10356 case BUILT_IN_SAVEREGS:
10357 case BUILT_IN_POPCOUNTL:
10358 case BUILT_IN_POPCOUNTLL:
10359 case BUILT_IN_POPCOUNTIMAX:
10360 case BUILT_IN_POPCOUNT:
10361 case BUILT_IN_PARITYL:
10362 case BUILT_IN_PARITYLL:
10363 case BUILT_IN_PARITYIMAX:
10364 case BUILT_IN_PARITY:
10365 case BUILT_IN_LABS:
10366 case BUILT_IN_LLABS:
10367 case BUILT_IN_PREFETCH:
10368 case BUILT_IN_ACC_ON_DEVICE:
10369 return true;
10371 default:
10372 return is_simple_builtin (decl);
10375 return false;
10378 /* Return true if T is a constant and the value cast to a target char
10379 can be represented by a host char.
10380 Store the cast char constant in *P if so. */
10382 bool
10383 target_char_cst_p (tree t, char *p)
10385 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10386 return false;
10388 *p = (char)tree_to_uhwi (t);
10389 return true;