gcc/builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
70 #include "intl.h"
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
84 #include "builtins.def"
87 /* Set up an array of builtin_info_type, making sure each element's decl is
88 initialized to NULL_TREE. */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcat (tree, rtx);
129 static rtx expand_builtin_strcpy (tree, rtx);
130 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
131 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
132 static rtx expand_builtin_strncat (tree, rtx);
133 static rtx expand_builtin_strncpy (tree, rtx);
134 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
135 static rtx expand_builtin_memset (tree, rtx, machine_mode);
136 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
137 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
138 static rtx expand_builtin_bzero (tree);
139 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
140 static rtx expand_builtin_alloca (tree, bool);
141 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
142 static rtx expand_builtin_frame_address (tree, tree);
143 static tree stabilize_va_list_loc (location_t, tree, int);
144 static rtx expand_builtin_expect (tree, rtx);
145 static tree fold_builtin_constant_p (tree);
146 static tree fold_builtin_classify_type (tree);
147 static tree fold_builtin_strlen (location_t, tree, tree);
148 static tree fold_builtin_inf (location_t, tree, int);
149 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
150 static bool validate_arg (const_tree, enum tree_code code);
151 static rtx expand_builtin_fabs (tree, rtx, rtx);
152 static rtx expand_builtin_signbit (tree, rtx);
153 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
154 static tree fold_builtin_isascii (location_t, tree);
155 static tree fold_builtin_toascii (location_t, tree);
156 static tree fold_builtin_isdigit (location_t, tree);
157 static tree fold_builtin_fabs (location_t, tree, tree);
158 static tree fold_builtin_abs (location_t, tree, tree);
159 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
160 enum tree_code);
161 static tree fold_builtin_0 (location_t, tree);
162 static tree fold_builtin_1 (location_t, tree, tree);
163 static tree fold_builtin_2 (location_t, tree, tree, tree);
164 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
165 static tree fold_builtin_varargs (location_t, tree, tree*, int);
167 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
168 static tree fold_builtin_strspn (location_t, tree, tree);
169 static tree fold_builtin_strcspn (location_t, tree, tree);
171 static rtx expand_builtin_object_size (tree);
172 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
173 enum built_in_function);
174 static void maybe_emit_chk_warning (tree, enum built_in_function);
175 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
176 static void maybe_emit_free_warning (tree);
177 static tree fold_builtin_object_size (tree, tree);
179 unsigned HOST_WIDE_INT target_newline;
180 unsigned HOST_WIDE_INT target_percent;
181 static unsigned HOST_WIDE_INT target_c;
182 static unsigned HOST_WIDE_INT target_s;
183 char target_percent_c[3];
184 char target_percent_s[3];
185 char target_percent_s_newline[4];
186 static tree do_mpfr_remquo (tree, tree, tree);
187 static tree do_mpfr_lgamma_r (tree, tree, tree);
188 static void expand_builtin_sync_synchronize (void);
190 /* Return true if NAME starts with __builtin_ or __sync_. */
192 static bool
193 is_builtin_name (const char *name)
195 if (strncmp (name, "__builtin_", 10) == 0)
196 return true;
197 if (strncmp (name, "__sync_", 7) == 0)
198 return true;
199 if (strncmp (name, "__atomic_", 9) == 0)
200 return true;
201 if (flag_cilkplus
202 && (!strcmp (name, "__cilkrts_detach")
203 || !strcmp (name, "__cilkrts_pop_frame")))
204 return true;
205 return false;
209 /* Return true if DECL is a function symbol representing a built-in. */
211 bool
212 is_builtin_fn (tree decl)
214 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217 /* Return true if NODE should be considered for inline expansion regardless
218 of the optimization level. This means whenever a function is invoked with
219 its "internal" name, which normally contains the prefix "__builtin". */
221 bool
222 called_as_built_in (tree node)
224 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
225 we want the name used to call the function, not the name it
226 will have. */
227 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
228 return is_builtin_name (name);
231 /* Compute values M and N such that M divides (address of EXP - N) and such
232 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
233 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
234 *ALIGNP and any bit-offset to *BITPOSP.
236 Note that the address (and thus the alignment) computed here is based
237 on the address to which a symbol resolves, whereas DECL_ALIGN is based
238 on the address at which an object is actually located. These two
239 addresses are not always the same. For example, on ARM targets,
240 the address &foo of a Thumb function foo() has the lowest bit set,
241 whereas foo() itself starts on an even address.
243 If ADDR_P is true we are taking the address of the memory reference EXP
244 and thus cannot rely on the access taking place. */
246 static bool
247 get_object_alignment_2 (tree exp, unsigned int *alignp,
248 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
250 HOST_WIDE_INT bitsize, bitpos;
251 tree offset;
252 machine_mode mode;
253 int unsignedp, reversep, volatilep;
254 unsigned int align = BITS_PER_UNIT;
255 bool known_alignment = false;
257 /* Get the innermost object and the constant (bitpos) and possibly
258 variable (offset) offset of the access. */
259 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
260 &unsignedp, &reversep, &volatilep);
262 /* Extract alignment information from the innermost object and
263 possibly adjust bitpos and offset. */
264 if (TREE_CODE (exp) == FUNCTION_DECL)
266 /* Function addresses can encode extra information besides their
267 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
268 allows the low bit to be used as a virtual bit, we know
269 that the address itself must be at least 2-byte aligned. */
270 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
271 align = 2 * BITS_PER_UNIT;
273 else if (TREE_CODE (exp) == LABEL_DECL)
275 else if (TREE_CODE (exp) == CONST_DECL)
277 /* The alignment of a CONST_DECL is determined by its initializer. */
278 exp = DECL_INITIAL (exp);
279 align = TYPE_ALIGN (TREE_TYPE (exp));
280 if (CONSTANT_CLASS_P (exp))
281 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
283 known_alignment = true;
285 else if (DECL_P (exp))
287 align = DECL_ALIGN (exp);
288 known_alignment = true;
290 else if (TREE_CODE (exp) == INDIRECT_REF
291 || TREE_CODE (exp) == MEM_REF
292 || TREE_CODE (exp) == TARGET_MEM_REF)
294 tree addr = TREE_OPERAND (exp, 0);
295 unsigned ptr_align;
296 unsigned HOST_WIDE_INT ptr_bitpos;
297 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
299 /* If the address is explicitly aligned, handle that. */
300 if (TREE_CODE (addr) == BIT_AND_EXPR
301 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
303 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
304 ptr_bitmask *= BITS_PER_UNIT;
305 align = least_bit_hwi (ptr_bitmask);
306 addr = TREE_OPERAND (addr, 0);
309 known_alignment
310 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
311 align = MAX (ptr_align, align);
313 /* Re-apply explicit alignment to the bitpos. */
314 ptr_bitpos &= ptr_bitmask;
316 /* The alignment of the pointer operand in a TARGET_MEM_REF
317 has to take the variable offset parts into account. */
318 if (TREE_CODE (exp) == TARGET_MEM_REF)
320 if (TMR_INDEX (exp))
322 unsigned HOST_WIDE_INT step = 1;
323 if (TMR_STEP (exp))
324 step = TREE_INT_CST_LOW (TMR_STEP (exp));
325 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
327 if (TMR_INDEX2 (exp))
328 align = BITS_PER_UNIT;
329 known_alignment = false;
332 /* When EXP is an actual memory reference then we can use
333 TYPE_ALIGN of a pointer indirection to derive alignment.
334 Do so only if get_pointer_alignment_1 did not reveal absolute
335 alignment knowledge and if using that alignment would
336 improve the situation. */
337 unsigned int talign;
338 if (!addr_p && !known_alignment
339 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
340 && talign > align)
341 align = talign;
342 else
344 /* Else adjust bitpos accordingly. */
345 bitpos += ptr_bitpos;
346 if (TREE_CODE (exp) == MEM_REF
347 || TREE_CODE (exp) == TARGET_MEM_REF)
348 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
351 else if (TREE_CODE (exp) == STRING_CST)
353 /* STRING_CST are the only constant objects we allow to be not
354 wrapped inside a CONST_DECL. */
355 align = TYPE_ALIGN (TREE_TYPE (exp));
356 if (CONSTANT_CLASS_P (exp))
357 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
359 known_alignment = true;
362 /* If there is a non-constant offset part extract the maximum
363 alignment that can prevail. */
364 if (offset)
366 unsigned int trailing_zeros = tree_ctz (offset);
367 if (trailing_zeros < HOST_BITS_PER_INT)
369 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
370 if (inner)
371 align = MIN (align, inner);
375 *alignp = align;
376 *bitposp = bitpos & (*alignp - 1);
377 return known_alignment;
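/* Illustrative example (editorial addition, not in the original source):
   for a MEM_REF through a pointer known to be 16-byte aligned plus a
   constant 4-byte offset, the code above would set *ALIGNP to 128 (bits)
   and *BITPOSP to 32 (bits), i.e. 128 divides (address of EXP - 32), and
   KNOWN_ALIGNMENT would be true because the pointer alignment itself was
   known rather than merely derived from TYPE_ALIGN.  */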
380 /* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385 bool
386 get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 /* Return the alignment in bits of EXP, an object. */
394 unsigned int
395 get_object_alignment (tree exp)
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
400 get_object_alignment_1 (exp, &align, &bitpos);
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
405 if (bitpos != 0)
406 align = least_bit_hwi (bitpos);
407 return align;
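/* Illustrative example (editorial addition, not in the original source):
   continuing the case above, ALIGN = 128 and BITPOS = 32 satisfy
   ptr & (align - 1) == bitpos, so get_object_alignment collapses this to
   least_bit_hwi (32) = 32 bits and reports the object as (at least)
   4-byte aligned.  */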
410 /* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
412 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
413 the results are just a conservative approximation.
415 If EXP is not a pointer, false is returned too. */
417 bool
418 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
421 STRIP_NOPS (exp);
423 if (TREE_CODE (exp) == ADDR_EXPR)
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 unsigned int ptr_align, ptr_misalign;
452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
462 /* We cannot really tell whether this result is an approximation. */
463 return false;
465 else
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT;
469 return false;
472 else if (TREE_CODE (exp) == INTEGER_CST)
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT;
482 return false;
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
492 unsigned int
493 get_pointer_alignment (tree exp)
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
498 get_pointer_alignment_1 (exp, &align, &bitpos);
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
503 if (bitpos != 0)
504 align = least_bit_hwi (bitpos);
506 return align;
509 /* Return the number of non-zero elements in the sequence
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
513 static unsigned
514 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518 unsigned n;
520 if (eltsize == 1)
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
530 else
532 for (n = 0; n < maxelts; n++)
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
539 return n;
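/* Illustrative example (editorial addition, not in the original source):
   with ELTSIZE == 2 (e.g. a char16_t string) and the byte sequence
   "a\0b\0\0\0", element 0 is "a\0", element 1 is "b\0" and element 2 is
   "\0\0"; the memcmp against "\0\0\0\0" stops the scan at element 2, so
   string_length returns 2.  */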
542 /* Compute the length of a null-terminated character string or wide
543 character string handling character sizes of 1, 2, and 4 bytes.
544 TREE_STRING_LENGTH is not the right way because it evaluates to
545 the size of the character array in bytes (as opposed to characters)
546 and because it can contain a zero byte in the middle.
548 ONLY_VALUE should be nonzero if the result is not going to be emitted
549 into the instruction stream and zero if it is going to be expanded.
550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551 is returned, otherwise NULL, since
552 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
559 The value returned is of type `ssizetype'.
561 Unfortunately, string_constant can't access the values of const char
562 arrays with initializers, so neither can we do so here. */
564 tree
565 c_strlen (tree src, int only_value)
567 STRIP_NOPS (src);
568 if (TREE_CODE (src) == COND_EXPR
569 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
571 tree len1, len2;
573 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
574 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
575 if (tree_int_cst_equal (len1, len2))
576 return len1;
579 if (TREE_CODE (src) == COMPOUND_EXPR
580 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
581 return c_strlen (TREE_OPERAND (src, 1), only_value);
583 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
585 /* Offset from the beginning of the string in bytes. */
586 tree byteoff;
587 src = string_constant (src, &byteoff);
588 if (src == 0)
589 return NULL_TREE;
591 /* Determine the size of the string element. */
592 unsigned eltsize
593 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
595 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
596 length of SRC. */
597 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
599 /* PTR can point to the byte representation of any string type, including
600 char* and wchar_t*. */
601 const char *ptr = TREE_STRING_POINTER (src);
603 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
605 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
606 compute the offset to the following null if we don't know where to
607 start searching for it. */
608 if (string_length (ptr, eltsize, maxelts) < maxelts)
610 /* Return when an embedded null character is found. */
611 return NULL_TREE;
614 /* We don't know the starting offset, but we do know that the string
615 has no internal zero bytes. We can assume that the offset falls
616 within the bounds of the string; otherwise, the programmer deserves
617 what he gets. Subtract the offset from the length of the string,
618 and return that. This would perhaps not be valid if we were dealing
619 with named arrays in addition to literal string constants. */
621 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
624 /* Offset from the beginning of the string in elements. */
625 HOST_WIDE_INT eltoff;
627 /* We have a known offset into the string. Start searching there for
628 a null character if we can represent it as a single HOST_WIDE_INT. */
629 if (byteoff == 0)
630 eltoff = 0;
631 else if (! tree_fits_shwi_p (byteoff))
632 eltoff = -1;
633 else
634 eltoff = tree_to_shwi (byteoff) / eltsize;
636 /* If the offset is known to be out of bounds, warn, and call strlen at
637 runtime. */
638 if (eltoff < 0 || eltoff > maxelts)
640 /* Suppress multiple warnings for propagated constant strings. */
641 if (only_value != 2
642 && !TREE_NO_WARNING (src))
644 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
645 eltoff);
646 TREE_NO_WARNING (src) = 1;
648 return NULL_TREE;
651 /* Use strlen to search for the first zero byte. Since any strings
652 constructed with build_string will have nulls appended, we win even
653 if we get handed something like (char[4])"abcd".
655 Since ELTOFF is our starting index into the string, no further
656 calculation is needed. */
657 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
658 maxelts - eltoff);
660 return ssize_int (len);
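/* Illustrative examples (editorial addition, not in the original source):
   for the string constant "hello" with a constant byte offset of 2,
   c_strlen returns ssize_int (3); for "foo\0bar" with a non-constant
   offset it returns NULL_TREE because of the embedded NUL; and for a
   constant offset outside [0, MAXELTS] it warns (unless ONLY_VALUE == 2)
   and returns NULL_TREE, leaving the length to a runtime strlen.  */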
663 /* Return a constant integer corresponding to target reading
664 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
666 static rtx
667 c_readstr (const char *str, machine_mode mode)
669 HOST_WIDE_INT ch;
670 unsigned int i, j;
671 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
673 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
674 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
675 / HOST_BITS_PER_WIDE_INT;
677 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
678 for (i = 0; i < len; i++)
679 tmp[i] = 0;
681 ch = 1;
682 for (i = 0; i < GET_MODE_SIZE (mode); i++)
684 j = i;
685 if (WORDS_BIG_ENDIAN)
686 j = GET_MODE_SIZE (mode) - i - 1;
687 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
688 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
689 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
690 j *= BITS_PER_UNIT;
692 if (ch)
693 ch = (unsigned char) str[i];
694 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
697 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
698 return immed_wide_int_const (c, mode);
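/* Illustrative example (editorial addition, not in the original source):
   c_readstr ("abcd", SImode) yields the constant 0x64636261 on a typical
   little-endian target and 0x61626364 on a big-endian one.  Once a NUL
   byte has been read, CH stays zero, so any remaining bytes of the
   constant are zero as well.  */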
701 /* Cast a target constant CST to target CHAR and if that value fits into
702 host char type, return zero and put that value into variable pointed to by
703 P. */
705 static int
706 target_char_cast (tree cst, char *p)
708 unsigned HOST_WIDE_INT val, hostval;
710 if (TREE_CODE (cst) != INTEGER_CST
711 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
712 return 1;
714 /* Do not care if it fits or not right here. */
715 val = TREE_INT_CST_LOW (cst);
717 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
718 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
720 hostval = val;
721 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
722 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
724 if (val != hostval)
725 return 1;
727 *p = hostval;
728 return 0;
731 /* Similar to save_expr, but assumes that arbitrary code is not executed
732 in between the multiple evaluations. In particular, we assume that a
733 non-addressable local variable will not be modified. */
735 static tree
736 builtin_save_expr (tree exp)
738 if (TREE_CODE (exp) == SSA_NAME
739 || (TREE_ADDRESSABLE (exp) == 0
740 && (TREE_CODE (exp) == PARM_DECL
741 || (VAR_P (exp) && !TREE_STATIC (exp)))))
742 return exp;
744 return save_expr (exp);
747 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
748 times to get the address of either a higher stack frame, or a return
749 address located within it (depending on FNDECL_CODE). */
751 static rtx
752 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
754 int i;
755 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
756 if (tem == NULL_RTX)
758 /* For a zero count with __builtin_return_address, we don't care what
759 frame address we return, because target-specific definitions will
760 override us. Therefore frame pointer elimination is OK, and using
761 the soft frame pointer is OK.
763 For a nonzero count, or a zero count with __builtin_frame_address,
764 we require a stable offset from the current frame pointer to the
765 previous one, so we must use the hard frame pointer, and
766 we must disable frame pointer elimination. */
767 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
768 tem = frame_pointer_rtx;
769 else
771 tem = hard_frame_pointer_rtx;
773 /* Tell reload not to eliminate the frame pointer. */
774 crtl->accesses_prior_frames = 1;
778 if (count > 0)
779 SETUP_FRAME_ADDRESSES ();
781 /* On the SPARC, the return address is not in the frame, it is in a
782 register. There is no way to access it off of the current frame
783 pointer, but it can be accessed off the previous frame pointer by
784 reading the value from the register window save area. */
785 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
786 count--;
788 /* Scan back COUNT frames to the specified frame. */
789 for (i = 0; i < count; i++)
791 /* Assume the dynamic chain pointer is in the word that the
792 frame address points to, unless otherwise specified. */
793 tem = DYNAMIC_CHAIN_ADDRESS (tem);
794 tem = memory_address (Pmode, tem);
795 tem = gen_frame_mem (Pmode, tem);
796 tem = copy_to_reg (tem);
799 /* For __builtin_frame_address, return what we've got. But, on
800 the SPARC for example, we may have to add a bias. */
801 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
802 return FRAME_ADDR_RTX (tem);
804 /* For __builtin_return_address, get the return address from that frame. */
805 #ifdef RETURN_ADDR_RTX
806 tem = RETURN_ADDR_RTX (count, tem);
807 #else
808 tem = memory_address (Pmode,
809 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
810 tem = gen_frame_mem (Pmode, tem);
811 #endif
812 return tem;
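/* Illustrative note (editorial addition, not in the original source):
   for __builtin_return_address (0) the loop above is not entered: TEM is
   the (hard) frame pointer and the return address is read either via
   RETURN_ADDR_RTX or from the word just past the frame address.  Each
   additional count follows DYNAMIC_CHAIN_ADDRESS once more, walking one
   frame further up the stack.  */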
815 /* Alias set used for setjmp buffer. */
816 static alias_set_type setjmp_alias_set = -1;
818 /* Construct the leading half of a __builtin_setjmp call. Control will
819 return to RECEIVER_LABEL. This is also called directly by the SJLJ
820 exception handling code. */
822 void
823 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
825 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
826 rtx stack_save;
827 rtx mem;
829 if (setjmp_alias_set == -1)
830 setjmp_alias_set = new_alias_set ();
832 buf_addr = convert_memory_address (Pmode, buf_addr);
834 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
836 /* We store the frame pointer and the address of receiver_label in
837 the buffer and use the rest of it for the stack save area, which
838 is machine-dependent. */
840 mem = gen_rtx_MEM (Pmode, buf_addr);
841 set_mem_alias_set (mem, setjmp_alias_set);
842 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
844 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
845 GET_MODE_SIZE (Pmode))),
846 set_mem_alias_set (mem, setjmp_alias_set);
848 emit_move_insn (validize_mem (mem),
849 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
851 stack_save = gen_rtx_MEM (sa_mode,
852 plus_constant (Pmode, buf_addr,
853 2 * GET_MODE_SIZE (Pmode)));
854 set_mem_alias_set (stack_save, setjmp_alias_set);
855 emit_stack_save (SAVE_NONLOCAL, &stack_save);
857 /* If there is further processing to do, do it. */
858 if (targetm.have_builtin_setjmp_setup ())
859 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
861 /* We have a nonlocal label. */
862 cfun->has_nonlocal_label = 1;
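/* Illustrative note (editorial addition, not in the original source):
   the resulting buffer layout is: word 0 holds the value of
   targetm.builtin_setjmp_frame_value (), word 1 holds the address of
   RECEIVER_LABEL, and everything from offset 2 * GET_MODE_SIZE (Pmode)
   onward is the SAVE_NONLOCAL stack save area.  expand_builtin_longjmp
   and expand_builtin_update_setjmp_buf below rely on the same offsets.  */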
865 /* Construct the trailing part of a __builtin_setjmp call. This is
866 also called directly by the SJLJ exception handling code.
867 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
869 void
870 expand_builtin_setjmp_receiver (rtx receiver_label)
872 rtx chain;
874 /* Mark the FP as used when we get here, so we have to make sure it's
875 marked as used by this function. */
876 emit_use (hard_frame_pointer_rtx);
878 /* Mark the static chain as clobbered here so life information
879 doesn't get messed up for it. */
880 chain = targetm.calls.static_chain (current_function_decl, true);
881 if (chain && REG_P (chain))
882 emit_clobber (chain);
884 /* Now put in the code to restore the frame pointer, and argument
885 pointer, if needed. */
886 if (! targetm.have_nonlocal_goto ())
888 /* First adjust our frame pointer to its actual value. It was
889 previously set to the start of the virtual area corresponding to
890 the stacked variables when we branched here and now needs to be
891 adjusted to the actual hardware fp value.
893 Assignments to virtual registers are converted by
894 instantiate_virtual_regs into the corresponding assignment
895 to the underlying register (fp in this case) that makes
896 the original assignment true.
897 So the following insn will actually be decrementing fp by
898 STARTING_FRAME_OFFSET. */
899 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
901 /* Restoring the frame pointer also modifies the hard frame pointer.
902 Mark it used (so that the previous assignment remains live once
903 the frame pointer is eliminated) and clobbered (to represent the
904 implicit update from the assignment). */
905 emit_use (hard_frame_pointer_rtx);
906 emit_clobber (hard_frame_pointer_rtx);
909 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
911 /* If the argument pointer can be eliminated in favor of the
912 frame pointer, we don't need to restore it. We assume here
913 that if such an elimination is present, it can always be used.
914 This is the case on all known machines; if we don't make this
915 assumption, we do unnecessary saving on many machines. */
916 size_t i;
917 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
919 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
920 if (elim_regs[i].from == ARG_POINTER_REGNUM
921 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
922 break;
924 if (i == ARRAY_SIZE (elim_regs))
926 /* Now restore our arg pointer from the address at which it
927 was saved in our stack frame. */
928 emit_move_insn (crtl->args.internal_arg_pointer,
929 copy_to_reg (get_arg_pointer_save_area ()));
933 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
934 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
935 else if (targetm.have_nonlocal_goto_receiver ())
936 emit_insn (targetm.gen_nonlocal_goto_receiver ());
937 else
938 { /* Nothing */ }
940 /* We must not allow the code we just generated to be reordered by
941 scheduling. Specifically, the update of the frame pointer must
942 happen immediately, not later. */
943 emit_insn (gen_blockage ());
946 /* __builtin_longjmp is passed a pointer to an array of five words (not
947 all will be used on all machines). It operates similarly to the C
948 library function of the same name, but is more efficient. Much of
949 the code below is copied from the handling of non-local gotos. */
951 static void
952 expand_builtin_longjmp (rtx buf_addr, rtx value)
954 rtx fp, lab, stack;
955 rtx_insn *insn, *last;
956 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
958 /* DRAP is needed for stack realign if longjmp is expanded to current
959 function */
960 if (SUPPORTS_STACK_ALIGNMENT)
961 crtl->need_drap = true;
963 if (setjmp_alias_set == -1)
964 setjmp_alias_set = new_alias_set ();
966 buf_addr = convert_memory_address (Pmode, buf_addr);
968 buf_addr = force_reg (Pmode, buf_addr);
970 /* We require that the user must pass a second argument of 1, because
971 that is what builtin_setjmp will return. */
972 gcc_assert (value == const1_rtx);
974 last = get_last_insn ();
975 if (targetm.have_builtin_longjmp ())
976 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
977 else
979 fp = gen_rtx_MEM (Pmode, buf_addr);
980 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
981 GET_MODE_SIZE (Pmode)));
983 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
984 2 * GET_MODE_SIZE (Pmode)));
985 set_mem_alias_set (fp, setjmp_alias_set);
986 set_mem_alias_set (lab, setjmp_alias_set);
987 set_mem_alias_set (stack, setjmp_alias_set);
989 /* Pick up FP, label, and SP from the block and jump. This code is
990 from expand_goto in stmt.c; see there for detailed comments. */
991 if (targetm.have_nonlocal_goto ())
992 /* We have to pass a value to the nonlocal_goto pattern that will
993 get copied into the static_chain pointer, but it does not matter
994 what that value is, because builtin_setjmp does not use it. */
995 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
996 else
998 lab = copy_to_reg (lab);
1000 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1001 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1003 emit_move_insn (hard_frame_pointer_rtx, fp);
1004 emit_stack_restore (SAVE_NONLOCAL, stack);
1006 emit_use (hard_frame_pointer_rtx);
1007 emit_use (stack_pointer_rtx);
1008 emit_indirect_jump (lab);
1012 /* Search backwards and mark the jump insn as a non-local goto.
1013 Note that this precludes the use of __builtin_longjmp to a
1014 __builtin_setjmp target in the same function. However, we've
1015 already cautioned the user that these functions are for
1016 internal exception handling use only. */
1017 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1019 gcc_assert (insn != last);
1021 if (JUMP_P (insn))
1023 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1024 break;
1026 else if (CALL_P (insn))
1027 break;
1031 static inline bool
1032 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1034 return (iter->i < iter->n);
1037 /* This function validates the types of a function call argument list
1038 against a specified list of tree_codes. If the last specifier is a 0,
1039 that represents an ellipsis, otherwise the last specifier must be a
1040 VOID_TYPE. */
1042 static bool
1043 validate_arglist (const_tree callexpr, ...)
1045 enum tree_code code;
1046 bool res = 0;
1047 va_list ap;
1048 const_call_expr_arg_iterator iter;
1049 const_tree arg;
1051 va_start (ap, callexpr);
1052 init_const_call_expr_arg_iterator (callexpr, &iter);
1054 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1055 tree fn = CALL_EXPR_FN (callexpr);
1056 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1058 for (unsigned argno = 1; ; ++argno)
1060 code = (enum tree_code) va_arg (ap, int);
1062 switch (code)
1064 case 0:
1065 /* This signifies an ellipsis; any further arguments are all ok. */
1066 res = true;
1067 goto end;
1068 case VOID_TYPE:
1069 /* This signifies an endlink, if no arguments remain, return
1070 true, otherwise return false. */
1071 res = !more_const_call_expr_args_p (&iter);
1072 goto end;
1073 case POINTER_TYPE:
1074 /* The actual argument must be nonnull when either the whole
1075 called function has been declared nonnull, or when the formal
1076 argument corresponding to the actual argument has been. */
1077 if (argmap
1078 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1080 arg = next_const_call_expr_arg (&iter);
1081 if (!validate_arg (arg, code) || integer_zerop (arg))
1082 goto end;
1083 break;
1085 /* FALLTHRU */
1086 default:
1087 /* If no parameters remain or the parameter's code does not
1088 match the specified code, return false. Otherwise continue
1089 checking any remaining arguments. */
1090 arg = next_const_call_expr_arg (&iter);
1091 if (!validate_arg (arg, code))
1092 goto end;
1093 break;
1097 /* We need gotos here since we can only have one VA_CLOSE in a
1098 function. */
1099 end: ;
1100 va_end (ap);
1102 BITMAP_FREE (argmap);
1104 return res;
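/* Usage sketch (editorial addition, not in the original source):

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   as in expand_builtin_nonlocal_goto below, requires exactly two pointer
   arguments; ending the specifier list with 0 instead of VOID_TYPE would
   accept any number of further arguments.  Pointer arguments are also
   rejected when they are literal nulls and a nonnull attribute applies.  */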
1107 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1108 and the address of the save area. */
1110 static rtx
1111 expand_builtin_nonlocal_goto (tree exp)
1113 tree t_label, t_save_area;
1114 rtx r_label, r_save_area, r_fp, r_sp;
1115 rtx_insn *insn;
1117 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1118 return NULL_RTX;
1120 t_label = CALL_EXPR_ARG (exp, 0);
1121 t_save_area = CALL_EXPR_ARG (exp, 1);
1123 r_label = expand_normal (t_label);
1124 r_label = convert_memory_address (Pmode, r_label);
1125 r_save_area = expand_normal (t_save_area);
1126 r_save_area = convert_memory_address (Pmode, r_save_area);
1127 /* Copy the address of the save location to a register just in case it was
1128 based on the frame pointer. */
1129 r_save_area = copy_to_reg (r_save_area);
1130 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1131 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1132 plus_constant (Pmode, r_save_area,
1133 GET_MODE_SIZE (Pmode)));
1135 crtl->has_nonlocal_goto = 1;
1137 /* ??? We no longer need to pass the static chain value, afaik. */
1138 if (targetm.have_nonlocal_goto ())
1139 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1140 else
1142 r_label = copy_to_reg (r_label);
1144 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1145 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1147 /* Restore frame pointer for containing function. */
1148 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1149 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1151 /* USE of hard_frame_pointer_rtx added for consistency;
1152 not clear if really needed. */
1153 emit_use (hard_frame_pointer_rtx);
1154 emit_use (stack_pointer_rtx);
1156 /* If the architecture is using a GP register, we must
1157 conservatively assume that the target function makes use of it.
1158 The prologue of functions with nonlocal gotos must therefore
1159 initialize the GP register to the appropriate value, and we
1160 must then make sure that this value is live at the point
1161 of the jump. (Note that this doesn't necessarily apply
1162 to targets with a nonlocal_goto pattern; they are free
1163 to implement it in their own way. Note also that this is
1164 a no-op if the GP register is a global invariant.) */
1165 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1166 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1167 emit_use (pic_offset_table_rtx);
1169 emit_indirect_jump (r_label);
1172 /* Search backwards to the jump insn and mark it as a
1173 non-local goto. */
1174 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1176 if (JUMP_P (insn))
1178 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1179 break;
1181 else if (CALL_P (insn))
1182 break;
1185 return const0_rtx;
1188 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1189 (not all will be used on all machines) that was passed to __builtin_setjmp.
1190 It updates the stack pointer in that block to the current value. This is
1191 also called directly by the SJLJ exception handling code. */
1193 void
1194 expand_builtin_update_setjmp_buf (rtx buf_addr)
1196 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1197 rtx stack_save
1198 = gen_rtx_MEM (sa_mode,
1199 memory_address
1200 (sa_mode,
1201 plus_constant (Pmode, buf_addr,
1202 2 * GET_MODE_SIZE (Pmode))));
1204 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1207 /* Expand a call to __builtin_prefetch. For a target that does not support
1208 data prefetch, evaluate the memory address argument in case it has side
1209 effects. */
1211 static void
1212 expand_builtin_prefetch (tree exp)
1214 tree arg0, arg1, arg2;
1215 int nargs;
1216 rtx op0, op1, op2;
1218 if (!validate_arglist (exp, POINTER_TYPE, 0))
1219 return;
1221 arg0 = CALL_EXPR_ARG (exp, 0);
1223 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1224 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1225 locality). */
1226 nargs = call_expr_nargs (exp);
1227 if (nargs > 1)
1228 arg1 = CALL_EXPR_ARG (exp, 1);
1229 else
1230 arg1 = integer_zero_node;
1231 if (nargs > 2)
1232 arg2 = CALL_EXPR_ARG (exp, 2);
1233 else
1234 arg2 = integer_three_node;
1236 /* Argument 0 is an address. */
1237 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1239 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1240 if (TREE_CODE (arg1) != INTEGER_CST)
1242 error ("second argument to %<__builtin_prefetch%> must be a constant");
1243 arg1 = integer_zero_node;
1245 op1 = expand_normal (arg1);
1246 /* Argument 1 must be either zero or one. */
1247 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1249 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1250 " using zero");
1251 op1 = const0_rtx;
1254 /* Argument 2 (locality) must be a compile-time constant int. */
1255 if (TREE_CODE (arg2) != INTEGER_CST)
1257 error ("third argument to %<__builtin_prefetch%> must be a constant");
1258 arg2 = integer_zero_node;
1260 op2 = expand_normal (arg2);
1261 /* Argument 2 must be 0, 1, 2, or 3. */
1262 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1264 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1265 op2 = const0_rtx;
1268 if (targetm.have_prefetch ())
1270 struct expand_operand ops[3];
1272 create_address_operand (&ops[0], op0);
1273 create_integer_operand (&ops[1], INTVAL (op1));
1274 create_integer_operand (&ops[2], INTVAL (op2));
1275 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1276 return;
1279 /* Don't do anything with direct references to volatile memory, but
1280 generate code to handle other side effects. */
1281 if (!MEM_P (op0) && side_effects_p (op0))
1282 emit_insn (op0);
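/* Usage note (editorial addition, not in the original source):
   __builtin_prefetch (p) expands like __builtin_prefetch (p, 0, 3): the
   read/write flag defaults to 0 and the locality to 3.  Both must be
   integer constants; out-of-range values (rw not 0/1, locality not 0..3)
   are diagnosed and replaced with zero before expansion.  */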
1285 /* Get a MEM rtx for expression EXP which is the address of an operand
1286 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1287 the maximum length of the block of memory that might be accessed or
1288 NULL if unknown. */
1290 static rtx
1291 get_memory_rtx (tree exp, tree len)
1293 tree orig_exp = exp;
1294 rtx addr, mem;
1296 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1297 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1298 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1299 exp = TREE_OPERAND (exp, 0);
1301 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1302 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1304 /* Get an expression we can use to find the attributes to assign to MEM.
1305 First remove any nops. */
1306 while (CONVERT_EXPR_P (exp)
1307 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1308 exp = TREE_OPERAND (exp, 0);
1310 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1311 (as builtin stringops may alias with anything). */
1312 exp = fold_build2 (MEM_REF,
1313 build_array_type (char_type_node,
1314 build_range_type (sizetype,
1315 size_one_node, len)),
1316 exp, build_int_cst (ptr_type_node, 0));
1318 /* If the MEM_REF has no acceptable address, try to get the base object
1319 from the original address we got, and build an all-aliasing
1320 unknown-sized access to that one. */
1321 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1322 set_mem_attributes (mem, exp, 0);
1323 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1324 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1325 0))))
1327 exp = build_fold_addr_expr (exp);
1328 exp = fold_build2 (MEM_REF,
1329 build_array_type (char_type_node,
1330 build_range_type (sizetype,
1331 size_zero_node,
1332 NULL)),
1333 exp, build_int_cst (ptr_type_node, 0));
1334 set_mem_attributes (mem, exp, 0);
1336 set_mem_alias_set (mem, 0);
1337 return mem;
1340 /* Built-in functions to perform an untyped call and return. */
1342 #define apply_args_mode \
1343 (this_target_builtins->x_apply_args_mode)
1344 #define apply_result_mode \
1345 (this_target_builtins->x_apply_result_mode)
1347 /* Return the size required for the block returned by __builtin_apply_args,
1348 and initialize apply_args_mode. */
1350 static int
1351 apply_args_size (void)
1353 static int size = -1;
1354 int align;
1355 unsigned int regno;
1356 machine_mode mode;
1358 /* The values computed by this function never change. */
1359 if (size < 0)
1361 /* The first value is the incoming arg-pointer. */
1362 size = GET_MODE_SIZE (Pmode);
1364 /* The second value is the structure value address unless this is
1365 passed as an "invisible" first argument. */
1366 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1367 size += GET_MODE_SIZE (Pmode);
1369 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1370 if (FUNCTION_ARG_REGNO_P (regno))
1372 mode = targetm.calls.get_raw_arg_mode (regno);
1374 gcc_assert (mode != VOIDmode);
1376 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1377 if (size % align != 0)
1378 size = CEIL (size, align) * align;
1379 size += GET_MODE_SIZE (mode);
1380 apply_args_mode[regno] = mode;
1382 else
1384 apply_args_mode[regno] = VOIDmode;
1387 return size;
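/* Illustrative note (editorial addition, not in the original source):
   the computed block therefore starts with one Pmode slot for the
   incoming arg pointer, followed by an optional second Pmode slot for
   the structure value address (only when it is not passed as an
   "invisible" first argument), and then one naturally aligned slot per
   argument register, using the mode recorded in apply_args_mode[].  */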
1390 /* Return the size required for the block returned by __builtin_apply,
1391 and initialize apply_result_mode. */
1393 static int
1394 apply_result_size (void)
1396 static int size = -1;
1397 int align, regno;
1398 machine_mode mode;
1400 /* The values computed by this function never change. */
1401 if (size < 0)
1403 size = 0;
1405 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1406 if (targetm.calls.function_value_regno_p (regno))
1408 mode = targetm.calls.get_raw_result_mode (regno);
1410 gcc_assert (mode != VOIDmode);
1412 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1413 if (size % align != 0)
1414 size = CEIL (size, align) * align;
1415 size += GET_MODE_SIZE (mode);
1416 apply_result_mode[regno] = mode;
1418 else
1419 apply_result_mode[regno] = VOIDmode;
1421 /* Allow targets that use untyped_call and untyped_return to override
1422 the size so that machine-specific information can be stored here. */
1423 #ifdef APPLY_RESULT_SIZE
1424 size = APPLY_RESULT_SIZE;
1425 #endif
1427 return size;
1430 /* Create a vector describing the result block RESULT. If SAVEP is true,
1431 the result block is used to save the values; otherwise it is used to
1432 restore the values. */
1434 static rtx
1435 result_vector (int savep, rtx result)
1437 int regno, size, align, nelts;
1438 machine_mode mode;
1439 rtx reg, mem;
1440 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1442 size = nelts = 0;
1443 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1444 if ((mode = apply_result_mode[regno]) != VOIDmode)
1446 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1447 if (size % align != 0)
1448 size = CEIL (size, align) * align;
1449 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1450 mem = adjust_address (result, mode, size);
1451 savevec[nelts++] = (savep
1452 ? gen_rtx_SET (mem, reg)
1453 : gen_rtx_SET (reg, mem));
1454 size += GET_MODE_SIZE (mode);
1456 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1459 /* Save the state required to perform an untyped call with the same
1460 arguments as were passed to the current function. */
1462 static rtx
1463 expand_builtin_apply_args_1 (void)
1465 rtx registers, tem;
1466 int size, align, regno;
1467 machine_mode mode;
1468 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1470 /* Create a block where the arg-pointer, structure value address,
1471 and argument registers can be saved. */
1472 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
1476 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1477 size += GET_MODE_SIZE (Pmode);
1479 /* Save each register used in calling a function to the block. */
1480 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1481 if ((mode = apply_args_mode[regno]) != VOIDmode)
1483 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1484 if (size % align != 0)
1485 size = CEIL (size, align) * align;
1487 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1489 emit_move_insn (adjust_address (registers, mode, size), tem);
1490 size += GET_MODE_SIZE (mode);
1493 /* Save the arg pointer to the block. */
1494 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1495 /* We need the pointer as the caller actually passed the arguments to us,
1496 not as we might have pretended they were passed. Make sure it's a valid
1497 operand, as emit_move_insn isn't expected to handle a PLUS. */
1498 if (STACK_GROWS_DOWNWARD)
1499 tem
1500 = force_operand (plus_constant (Pmode, tem,
1501 crtl->args.pretend_args_size),
1502 NULL_RTX);
1503 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1505 size = GET_MODE_SIZE (Pmode);
1507 /* Save the structure value address unless this is passed as an
1508 "invisible" first argument. */
1509 if (struct_incoming_value)
1511 emit_move_insn (adjust_address (registers, Pmode, size),
1512 copy_to_reg (struct_incoming_value));
1513 size += GET_MODE_SIZE (Pmode);
1516 /* Return the address of the block. */
1517 return copy_addr_to_reg (XEXP (registers, 0));
1520 /* __builtin_apply_args returns block of memory allocated on
1521 the stack into which is stored the arg pointer, structure
1522 value address, static chain, and all the registers that might
1523 possibly be used in performing a function call. The code is
1524 moved to the start of the function so the incoming values are
1525 saved. */
1527 static rtx
1528 expand_builtin_apply_args (void)
1530 /* Don't do __builtin_apply_args more than once in a function.
1531 Save the result of the first call and reuse it. */
1532 if (apply_args_value != 0)
1533 return apply_args_value;
1535 /* When this function is called, it means that registers must be
1536 saved on entry to this function. So we migrate the
1537 call to the first insn of this function. */
1538 rtx temp;
1540 start_sequence ();
1541 temp = expand_builtin_apply_args_1 ();
1542 rtx_insn *seq = get_insns ();
1543 end_sequence ();
1545 apply_args_value = temp;
1547 /* Put the insns after the NOTE that starts the function.
1548 If this is inside a start_sequence, make the outer-level insn
1549 chain current, so the code is placed at the start of the
1550 function. If internal_arg_pointer is a non-virtual pseudo,
1551 it needs to be placed after the function that initializes
1552 that pseudo. */
1553 push_topmost_sequence ();
1554 if (REG_P (crtl->args.internal_arg_pointer)
1555 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1556 emit_insn_before (seq, parm_birth_insn);
1557 else
1558 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1559 pop_topmost_sequence ();
1560 return temp;
1564 /* Perform an untyped call and save the state required to perform an
1565 untyped return of whatever value was returned by the given function. */
1567 static rtx
1568 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1570 int size, align, regno;
1571 machine_mode mode;
1572 rtx incoming_args, result, reg, dest, src;
1573 rtx_call_insn *call_insn;
1574 rtx old_stack_level = 0;
1575 rtx call_fusage = 0;
1576 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1578 arguments = convert_memory_address (Pmode, arguments);
1580 /* Create a block where the return registers can be saved. */
1581 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1583 /* Fetch the arg pointer from the ARGUMENTS block. */
1584 incoming_args = gen_reg_rtx (Pmode);
1585 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1586 if (!STACK_GROWS_DOWNWARD)
1587 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1588 incoming_args, 0, OPTAB_LIB_WIDEN);
1590 /* Push a new argument block and copy the arguments. Do not allow
1591 the (potential) memcpy call below to interfere with our stack
1592 manipulations. */
1593 do_pending_stack_adjust ();
1594 NO_DEFER_POP;
1596 /* Save the stack with nonlocal if available. */
1597 if (targetm.have_save_stack_nonlocal ())
1598 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1599 else
1600 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1602 /* Allocate a block of memory onto the stack and copy the memory
1603 arguments to the outgoing arguments address. We can pass TRUE
1604 as the 4th argument because we just saved the stack pointer
1605 and will restore it right after the call. */
1606 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1608 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1609 may have already set current_function_calls_alloca to true.
1610 current_function_calls_alloca won't be set if argsize is zero,
1611 so we have to guarantee need_drap is true here. */
1612 if (SUPPORTS_STACK_ALIGNMENT)
1613 crtl->need_drap = true;
1615 dest = virtual_outgoing_args_rtx;
1616 if (!STACK_GROWS_DOWNWARD)
1618 if (CONST_INT_P (argsize))
1619 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1620 else
1621 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1623 dest = gen_rtx_MEM (BLKmode, dest);
1624 set_mem_align (dest, PARM_BOUNDARY);
1625 src = gen_rtx_MEM (BLKmode, incoming_args);
1626 set_mem_align (src, PARM_BOUNDARY);
1627 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1629 /* Refer to the argument block. */
1630 apply_args_size ();
1631 arguments = gen_rtx_MEM (BLKmode, arguments);
1632 set_mem_align (arguments, PARM_BOUNDARY);
1634 /* Walk past the arg-pointer and structure value address. */
1635 size = GET_MODE_SIZE (Pmode);
1636 if (struct_value)
1637 size += GET_MODE_SIZE (Pmode);
1639 /* Restore each of the registers previously saved. Make USE insns
1640 for each of these registers for use in making the call. */
1641 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1642 if ((mode = apply_args_mode[regno]) != VOIDmode)
1644 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1645 if (size % align != 0)
1646 size = CEIL (size, align) * align;
1647 reg = gen_rtx_REG (mode, regno);
1648 emit_move_insn (reg, adjust_address (arguments, mode, size));
1649 use_reg (&call_fusage, reg);
1650 size += GET_MODE_SIZE (mode);
1653 /* Restore the structure value address unless this is passed as an
1654 "invisible" first argument. */
1655 size = GET_MODE_SIZE (Pmode);
1656 if (struct_value)
1658 rtx value = gen_reg_rtx (Pmode);
1659 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1660 emit_move_insn (struct_value, value);
1661 if (REG_P (struct_value))
1662 use_reg (&call_fusage, struct_value);
1663 size += GET_MODE_SIZE (Pmode);
1666 /* All arguments and registers used for the call are set up by now! */
1667 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1669 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1670 and we don't want to load it into a register as an optimization,
1671 because prepare_call_address already did it if it should be done. */
1672 if (GET_CODE (function) != SYMBOL_REF)
1673 function = memory_address (FUNCTION_MODE, function);
1675 /* Generate the actual call instruction and save the return value. */
1676 if (targetm.have_untyped_call ())
1678 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1679 emit_call_insn (targetm.gen_untyped_call (mem, result,
1680 result_vector (1, result)));
1682 else if (targetm.have_call_value ())
1684 rtx valreg = 0;
1686 /* Locate the unique return register. It is not possible to
1687 express a call that sets more than one return register using
1688 call_value; use untyped_call for that. In fact, untyped_call
1689 only needs to save the return registers in the given block. */
1690 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1691 if ((mode = apply_result_mode[regno]) != VOIDmode)
1693 gcc_assert (!valreg); /* have_untyped_call required. */
1695 valreg = gen_rtx_REG (mode, regno);
1698 emit_insn (targetm.gen_call_value (valreg,
1699 gen_rtx_MEM (FUNCTION_MODE, function),
1700 const0_rtx, NULL_RTX, const0_rtx));
1702 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1704 else
1705 gcc_unreachable ();
1707 /* Find the CALL insn we just emitted, and attach the register usage
1708 information. */
1709 call_insn = last_call_insn ();
1710 add_function_usage_to (call_insn, call_fusage);
1712 /* Restore the stack. */
1713 if (targetm.have_save_stack_nonlocal ())
1714 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1715 else
1716 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1717 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1719 OK_DEFER_POP;
1721 /* Return the address of the result block. */
1722 result = copy_addr_to_reg (XEXP (result, 0));
1723 return convert_memory_address (ptr_mode, result);
1726 /* Perform an untyped return. */
1728 static void
1729 expand_builtin_return (rtx result)
1731 int size, align, regno;
1732 machine_mode mode;
1733 rtx reg;
1734 rtx_insn *call_fusage = 0;
1736 result = convert_memory_address (Pmode, result);
1738 apply_result_size ();
1739 result = gen_rtx_MEM (BLKmode, result);
1741 if (targetm.have_untyped_return ())
1743 rtx vector = result_vector (0, result);
1744 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1745 emit_barrier ();
1746 return;
1749 /* Restore the return value and note that each value is used. */
1750 size = 0;
1751 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1752 if ((mode = apply_result_mode[regno]) != VOIDmode)
1754 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1755 if (size % align != 0)
1756 size = CEIL (size, align) * align;
1757 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1758 emit_move_insn (reg, adjust_address (result, mode, size));
1760 push_to_sequence (call_fusage);
1761 emit_use (reg);
1762 call_fusage = get_insns ();
1763 end_sequence ();
1764 size += GET_MODE_SIZE (mode);
1767 /* Put the USE insns before the return. */
1768 emit_insn (call_fusage);
1770 /* Return whatever values were restored by jumping directly to the end
1771 of the function. */
1772 expand_naked_return ();
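/* As an illustrative sketch (the argument-block size of 64 below is a
   caller-chosen assumption), the untyped call/return machinery above is
   what implements source-level code such as:

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) fn, args, 64);
       __builtin_return (result);

   expand_builtin_apply copies the saved argument block onto the stack,
   emits the call, and returns the address of a block holding all
   possible return registers; expand_builtin_return restores those
   registers and jumps directly to the function's return point.  */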
1775 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1777 static enum type_class
1778 type_to_class (tree type)
1780 switch (TREE_CODE (type))
1782 case VOID_TYPE: return void_type_class;
1783 case INTEGER_TYPE: return integer_type_class;
1784 case ENUMERAL_TYPE: return enumeral_type_class;
1785 case BOOLEAN_TYPE: return boolean_type_class;
1786 case POINTER_TYPE: return pointer_type_class;
1787 case REFERENCE_TYPE: return reference_type_class;
1788 case OFFSET_TYPE: return offset_type_class;
1789 case REAL_TYPE: return real_type_class;
1790 case COMPLEX_TYPE: return complex_type_class;
1791 case FUNCTION_TYPE: return function_type_class;
1792 case METHOD_TYPE: return method_type_class;
1793 case RECORD_TYPE: return record_type_class;
1794 case UNION_TYPE:
1795 case QUAL_UNION_TYPE: return union_type_class;
1796 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1797 ? string_type_class : array_type_class);
1798 case LANG_TYPE: return lang_type_class;
1799 default: return no_type_class;
1803 /* Expand a call EXP to __builtin_classify_type. */
1805 static rtx
1806 expand_builtin_classify_type (tree exp)
1808 if (call_expr_nargs (exp))
1809 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1810 return GEN_INT (no_type_class);
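/* Illustrative examples of the classification done above, as seen from
   the source level:

       __builtin_classify_type (1.5)         evaluates to real_type_class
       __builtin_classify_type ((void *) 0)  evaluates to pointer_type_class

   The returned constants are the enum type_class values from
   typeclass.h.  */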
1813 /* This helper macro, meant to be used in mathfn_built_in below,
1814 determines which among a set of three builtin math functions is
1815 appropriate for a given type mode. The `F' and `L' cases are
1816 automatically generated from the `double' case. */
1817 #define CASE_MATHFN(MATHFN) \
1818 CASE_CFN_##MATHFN: \
1819 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1820 fcodel = BUILT_IN_##MATHFN##L ; break;
1821 /* Similar to above, but appends _R after any F/L suffix. */
1822 #define CASE_MATHFN_REENT(MATHFN) \
1823 case CFN_BUILT_IN_##MATHFN##_R: \
1824 case CFN_BUILT_IN_##MATHFN##F_R: \
1825 case CFN_BUILT_IN_##MATHFN##L_R: \
1826 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1827 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1829 /* Return a function equivalent to FN but operating on floating-point
1830 values of type TYPE, or END_BUILTINS if no such function exists.
1831 This is purely an operation on function codes; it does not guarantee
1832 that the target actually has an implementation of the function. */
1834 static built_in_function
1835 mathfn_built_in_2 (tree type, combined_fn fn)
1837 built_in_function fcode, fcodef, fcodel;
1839 switch (fn)
1841 CASE_MATHFN (ACOS)
1842 CASE_MATHFN (ACOSH)
1843 CASE_MATHFN (ASIN)
1844 CASE_MATHFN (ASINH)
1845 CASE_MATHFN (ATAN)
1846 CASE_MATHFN (ATAN2)
1847 CASE_MATHFN (ATANH)
1848 CASE_MATHFN (CBRT)
1849 CASE_MATHFN (CEIL)
1850 CASE_MATHFN (CEXPI)
1851 CASE_MATHFN (COPYSIGN)
1852 CASE_MATHFN (COS)
1853 CASE_MATHFN (COSH)
1854 CASE_MATHFN (DREM)
1855 CASE_MATHFN (ERF)
1856 CASE_MATHFN (ERFC)
1857 CASE_MATHFN (EXP)
1858 CASE_MATHFN (EXP10)
1859 CASE_MATHFN (EXP2)
1860 CASE_MATHFN (EXPM1)
1861 CASE_MATHFN (FABS)
1862 CASE_MATHFN (FDIM)
1863 CASE_MATHFN (FLOOR)
1864 CASE_MATHFN (FMA)
1865 CASE_MATHFN (FMAX)
1866 CASE_MATHFN (FMIN)
1867 CASE_MATHFN (FMOD)
1868 CASE_MATHFN (FREXP)
1869 CASE_MATHFN (GAMMA)
1870 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1871 CASE_MATHFN (HUGE_VAL)
1872 CASE_MATHFN (HYPOT)
1873 CASE_MATHFN (ILOGB)
1874 CASE_MATHFN (ICEIL)
1875 CASE_MATHFN (IFLOOR)
1876 CASE_MATHFN (INF)
1877 CASE_MATHFN (IRINT)
1878 CASE_MATHFN (IROUND)
1879 CASE_MATHFN (ISINF)
1880 CASE_MATHFN (J0)
1881 CASE_MATHFN (J1)
1882 CASE_MATHFN (JN)
1883 CASE_MATHFN (LCEIL)
1884 CASE_MATHFN (LDEXP)
1885 CASE_MATHFN (LFLOOR)
1886 CASE_MATHFN (LGAMMA)
1887 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1888 CASE_MATHFN (LLCEIL)
1889 CASE_MATHFN (LLFLOOR)
1890 CASE_MATHFN (LLRINT)
1891 CASE_MATHFN (LLROUND)
1892 CASE_MATHFN (LOG)
1893 CASE_MATHFN (LOG10)
1894 CASE_MATHFN (LOG1P)
1895 CASE_MATHFN (LOG2)
1896 CASE_MATHFN (LOGB)
1897 CASE_MATHFN (LRINT)
1898 CASE_MATHFN (LROUND)
1899 CASE_MATHFN (MODF)
1900 CASE_MATHFN (NAN)
1901 CASE_MATHFN (NANS)
1902 CASE_MATHFN (NEARBYINT)
1903 CASE_MATHFN (NEXTAFTER)
1904 CASE_MATHFN (NEXTTOWARD)
1905 CASE_MATHFN (POW)
1906 CASE_MATHFN (POWI)
1907 CASE_MATHFN (POW10)
1908 CASE_MATHFN (REMAINDER)
1909 CASE_MATHFN (REMQUO)
1910 CASE_MATHFN (RINT)
1911 CASE_MATHFN (ROUND)
1912 CASE_MATHFN (SCALB)
1913 CASE_MATHFN (SCALBLN)
1914 CASE_MATHFN (SCALBN)
1915 CASE_MATHFN (SIGNBIT)
1916 CASE_MATHFN (SIGNIFICAND)
1917 CASE_MATHFN (SIN)
1918 CASE_MATHFN (SINCOS)
1919 CASE_MATHFN (SINH)
1920 CASE_MATHFN (SQRT)
1921 CASE_MATHFN (TAN)
1922 CASE_MATHFN (TANH)
1923 CASE_MATHFN (TGAMMA)
1924 CASE_MATHFN (TRUNC)
1925 CASE_MATHFN (Y0)
1926 CASE_MATHFN (Y1)
1927 CASE_MATHFN (YN)
1929 default:
1930 return END_BUILTINS;
1933 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1934 return fcode;
1935 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1936 return fcodef;
1937 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1938 return fcodel;
1939 else
1940 return END_BUILTINS;
1943 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1944 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1945 otherwise use the explicit declaration. If we can't do the conversion,
1946 return null. */
1948 static tree
1949 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1951 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1952 if (fcode2 == END_BUILTINS)
1953 return NULL_TREE;
1955 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1956 return NULL_TREE;
1958 return builtin_decl_explicit (fcode2);
1961 /* Like mathfn_built_in_1, but always use the implicit array. */
1963 tree
1964 mathfn_built_in (tree type, combined_fn fn)
1966 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1969 /* Like mathfn_built_in_1, but take a built_in_function and
1970 always use the implicit array. */
1972 tree
1973 mathfn_built_in (tree type, enum built_in_function fn)
1975 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
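/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) would
   normally return the declaration of sqrtf; it returns NULL_TREE when
   the implicit declaration of the float variant is unavailable (an
   illustrative sketch, since availability depends on the target and on
   options such as -fno-builtin).  */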
1978 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1979 return its code, otherwise return IFN_LAST. Note that this function
1980 only tests whether the function is defined in internals.def, not whether
1981 it is actually available on the target. */
1983 internal_fn
1984 associated_internal_fn (tree fndecl)
1986 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1987 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1988 switch (DECL_FUNCTION_CODE (fndecl))
1990 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1991 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1992 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1993 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1994 #include "internal-fn.def"
1996 CASE_FLT_FN (BUILT_IN_POW10):
1997 return IFN_EXP10;
1999 CASE_FLT_FN (BUILT_IN_DREM):
2000 return IFN_REMAINDER;
2002 CASE_FLT_FN (BUILT_IN_SCALBN):
2003 CASE_FLT_FN (BUILT_IN_SCALBLN):
2004 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2005 return IFN_LDEXP;
2006 return IFN_LAST;
2008 default:
2009 return IFN_LAST;
2013 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2014 on the current target by a call to an internal function, return the
2015 code of that internal function, otherwise return IFN_LAST. The caller
2016 is responsible for ensuring that any side-effects of the built-in
2017 call are dealt with correctly. E.g. if CALL sets errno, the caller
2018 must decide that the errno result isn't needed or make it available
2019 in some other way. */
2021 internal_fn
2022 replacement_internal_fn (gcall *call)
2024 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2026 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2027 if (ifn != IFN_LAST)
2029 tree_pair types = direct_internal_fn_types (ifn, call);
2030 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2031 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2032 return ifn;
2035 return IFN_LAST;
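/* A minimal sketch of how this is used: for a GIMPLE call such as

       x = __builtin_sqrtf (y);

   associated_internal_fn maps BUILT_IN_SQRTF to IFN_SQRT, and
   replacement_internal_fn returns IFN_SQRT when the target can expand
   that internal function directly for the call's types in this block's
   optimization mode; otherwise IFN_LAST is returned and the library
   call is kept.  As noted above, the caller still has to deal with
   errno.  */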
2038 /* Expand a call to the builtin ternary math functions (fma).
2039 Return NULL_RTX if a normal call should be emitted rather than expanding the
2040 function in-line. EXP is the expression that is a call to the builtin
2041 function; if convenient, the result should be placed in TARGET.
2042 SUBTARGET may be used as the target for computing one of EXP's
2043 operands. */
2045 static rtx
2046 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2048 optab builtin_optab;
2049 rtx op0, op1, op2, result;
2050 rtx_insn *insns;
2051 tree fndecl = get_callee_fndecl (exp);
2052 tree arg0, arg1, arg2;
2053 machine_mode mode;
2055 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2056 return NULL_RTX;
2058 arg0 = CALL_EXPR_ARG (exp, 0);
2059 arg1 = CALL_EXPR_ARG (exp, 1);
2060 arg2 = CALL_EXPR_ARG (exp, 2);
2062 switch (DECL_FUNCTION_CODE (fndecl))
2064 CASE_FLT_FN (BUILT_IN_FMA):
2065 builtin_optab = fma_optab; break;
2066 default:
2067 gcc_unreachable ();
2070 /* Make a suitable register to place result in. */
2071 mode = TYPE_MODE (TREE_TYPE (exp));
2073 /* Before working hard, check whether the instruction is available. */
2074 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2075 return NULL_RTX;
2077 result = gen_reg_rtx (mode);
2079 /* Always stabilize the argument list. */
2080 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2081 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2082 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2084 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2085 op1 = expand_normal (arg1);
2086 op2 = expand_normal (arg2);
2088 start_sequence ();
2090 /* Compute into RESULT.
2091 Set RESULT to wherever the result comes back. */
2092 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2093 result, 0);
2095 /* If we were unable to expand via the builtin, stop the sequence
2096 (without outputting the insns) and call the library function
2097 with the stabilized argument list. */
2098 if (result == 0)
2100 end_sequence ();
2101 return expand_call (exp, target, target == const0_rtx);
2104 /* Output the entire sequence. */
2105 insns = get_insns ();
2106 end_sequence ();
2107 emit_insn (insns);
2109 return result;
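/* Illustrative example: a call such as

       r = __builtin_fmaf (a, b, c);

   is expanded through fma_optab into a single fused multiply-add insn
   when the target provides one for SFmode; otherwise the generated
   sequence is discarded and a normal call to fmaf is emitted.  */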
2112 /* Expand a call to the builtin sin and cos math functions.
2113 Return NULL_RTX if a normal call should be emitted rather than expanding the
2114 function in-line. EXP is the expression that is a call to the builtin
2115 function; if convenient, the result should be placed in TARGET.
2116 SUBTARGET may be used as the target for computing one of EXP's
2117 operands. */
2119 static rtx
2120 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2122 optab builtin_optab;
2123 rtx op0;
2124 rtx_insn *insns;
2125 tree fndecl = get_callee_fndecl (exp);
2126 machine_mode mode;
2127 tree arg;
2129 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2130 return NULL_RTX;
2132 arg = CALL_EXPR_ARG (exp, 0);
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 CASE_FLT_FN (BUILT_IN_COS):
2138 builtin_optab = sincos_optab; break;
2139 default:
2140 gcc_unreachable ();
2143 /* Make a suitable register to place result in. */
2144 mode = TYPE_MODE (TREE_TYPE (exp));
2146 /* Check if sincos insn is available, otherwise fall back
2147 to sin or cos insn. */
2148 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2149 switch (DECL_FUNCTION_CODE (fndecl))
2151 CASE_FLT_FN (BUILT_IN_SIN):
2152 builtin_optab = sin_optab; break;
2153 CASE_FLT_FN (BUILT_IN_COS):
2154 builtin_optab = cos_optab; break;
2155 default:
2156 gcc_unreachable ();
2159 /* Before working hard, check whether the instruction is available. */
2160 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2162 rtx result = gen_reg_rtx (mode);
2164 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2165 need to expand the argument again. This way, we will not perform
2166 side-effects more than once. */
2167 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2169 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2171 start_sequence ();
2173 /* Compute into RESULT.
2174 Set RESULT to wherever the result comes back. */
2175 if (builtin_optab == sincos_optab)
2177 int ok;
2179 switch (DECL_FUNCTION_CODE (fndecl))
2181 CASE_FLT_FN (BUILT_IN_SIN):
2182 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2183 break;
2184 CASE_FLT_FN (BUILT_IN_COS):
2185 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2186 break;
2187 default:
2188 gcc_unreachable ();
2190 gcc_assert (ok);
2192 else
2193 result = expand_unop (mode, builtin_optab, op0, result, 0);
2195 if (result != 0)
2197 /* Output the entire sequence. */
2198 insns = get_insns ();
2199 end_sequence ();
2200 emit_insn (insns);
2201 return result;
2204 /* If we were unable to expand via the builtin, stop the sequence
2205 (without outputting the insns) and call the library function
2206 with the stabilized argument list. */
2207 end_sequence ();
2210 return expand_call (exp, target, target == const0_rtx);
2213 /* Given an interclass math builtin decl FNDECL and its argument ARG
2214 return an RTL instruction code that implements the functionality.
2215 If that isn't possible or available return CODE_FOR_nothing. */
2217 static enum insn_code
2218 interclass_mathfn_icode (tree arg, tree fndecl)
2220 bool errno_set = false;
2221 optab builtin_optab = unknown_optab;
2222 machine_mode mode;
2224 switch (DECL_FUNCTION_CODE (fndecl))
2226 CASE_FLT_FN (BUILT_IN_ILOGB):
2227 errno_set = true; builtin_optab = ilogb_optab; break;
2228 CASE_FLT_FN (BUILT_IN_ISINF):
2229 builtin_optab = isinf_optab; break;
2230 case BUILT_IN_ISNORMAL:
2231 case BUILT_IN_ISFINITE:
2232 CASE_FLT_FN (BUILT_IN_FINITE):
2233 case BUILT_IN_FINITED32:
2234 case BUILT_IN_FINITED64:
2235 case BUILT_IN_FINITED128:
2236 case BUILT_IN_ISINFD32:
2237 case BUILT_IN_ISINFD64:
2238 case BUILT_IN_ISINFD128:
2239 /* These builtins have no optabs (yet). */
2240 break;
2241 default:
2242 gcc_unreachable ();
2245 /* There's no easy way to detect the case we need to set EDOM. */
2246 if (flag_errno_math && errno_set)
2247 return CODE_FOR_nothing;
2249 /* Optab mode depends on the mode of the input argument. */
2250 mode = TYPE_MODE (TREE_TYPE (arg));
2252 if (builtin_optab)
2253 return optab_handler (builtin_optab, mode);
2254 return CODE_FOR_nothing;
2257 /* Expand a call to one of the builtin math functions that operate on
2258 a floating-point argument and produce an integer result (ilogb, isinf,
2259 isnan, etc).
2260 Return 0 if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
2262 function; if convenient, the result should be placed in TARGET. */
2264 static rtx
2265 expand_builtin_interclass_mathfn (tree exp, rtx target)
2267 enum insn_code icode = CODE_FOR_nothing;
2268 rtx op0;
2269 tree fndecl = get_callee_fndecl (exp);
2270 machine_mode mode;
2271 tree arg;
2273 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2274 return NULL_RTX;
2276 arg = CALL_EXPR_ARG (exp, 0);
2277 icode = interclass_mathfn_icode (arg, fndecl);
2278 mode = TYPE_MODE (TREE_TYPE (arg));
2280 if (icode != CODE_FOR_nothing)
2282 struct expand_operand ops[1];
2283 rtx_insn *last = get_last_insn ();
2284 tree orig_arg = arg;
2286 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2287 need to expand the argument again. This way, we will not perform
2288 side-effects more than once. */
2289 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2291 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2293 if (mode != GET_MODE (op0))
2294 op0 = convert_to_mode (mode, op0, 0);
2296 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2297 if (maybe_legitimize_operands (icode, 0, 1, ops)
2298 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2299 return ops[0].value;
2301 delete_insns_since (last);
2302 CALL_EXPR_ARG (exp, 0) = orig_arg;
2305 return NULL_RTX;
2308 /* Expand a call to the builtin sincos math function.
2309 Return NULL_RTX if a normal call should be emitted rather than expanding the
2310 function in-line. EXP is the expression that is a call to the builtin
2311 function. */
2313 static rtx
2314 expand_builtin_sincos (tree exp)
2316 rtx op0, op1, op2, target1, target2;
2317 machine_mode mode;
2318 tree arg, sinp, cosp;
2319 int result;
2320 location_t loc = EXPR_LOCATION (exp);
2321 tree alias_type, alias_off;
2323 if (!validate_arglist (exp, REAL_TYPE,
2324 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2325 return NULL_RTX;
2327 arg = CALL_EXPR_ARG (exp, 0);
2328 sinp = CALL_EXPR_ARG (exp, 1);
2329 cosp = CALL_EXPR_ARG (exp, 2);
2331 /* Make a suitable register to place result in. */
2332 mode = TYPE_MODE (TREE_TYPE (arg));
2334 /* Check if sincos insn is available, otherwise emit the call. */
2335 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2336 return NULL_RTX;
2338 target1 = gen_reg_rtx (mode);
2339 target2 = gen_reg_rtx (mode);
2341 op0 = expand_normal (arg);
2342 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2343 alias_off = build_int_cst (alias_type, 0);
2344 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2345 sinp, alias_off));
2346 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2347 cosp, alias_off));
2349 /* Compute into target1 and target2.
2350 Set TARGET to wherever the result comes back. */
2351 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2352 gcc_assert (result);
2354 /* Move target1 and target2 to the memory locations indicated
2355 by op1 and op2. */
2356 emit_move_insn (op1, target1);
2357 emit_move_insn (op2, target2);
2359 return const0_rtx;
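/* Illustrative example: given a sincos pattern for DFmode, a call

       sincos (x, &s, &c);

   expands to one two-output insn followed by stores of the two results
   through the pointer arguments; without the pattern, NULL_RTX is
   returned above and the ordinary libm call is emitted instead.  */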
2362 /* Expand a call to the internal cexpi builtin to the sincos math function.
2363 EXP is the expression that is a call to the builtin function; if convenient,
2364 the result should be placed in TARGET. */
2366 static rtx
2367 expand_builtin_cexpi (tree exp, rtx target)
2369 tree fndecl = get_callee_fndecl (exp);
2370 tree arg, type;
2371 machine_mode mode;
2372 rtx op0, op1, op2;
2373 location_t loc = EXPR_LOCATION (exp);
2375 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2376 return NULL_RTX;
2378 arg = CALL_EXPR_ARG (exp, 0);
2379 type = TREE_TYPE (arg);
2380 mode = TYPE_MODE (TREE_TYPE (arg));
2382 /* Try expanding via a sincos optab, fall back to emitting a libcall
2383 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2384 is only generated from sincos or cexp, or when either of them is available. */
2385 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2387 op1 = gen_reg_rtx (mode);
2388 op2 = gen_reg_rtx (mode);
2390 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2392 /* Compute into op1 and op2. */
2393 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2395 else if (targetm.libc_has_function (function_sincos))
2397 tree call, fn = NULL_TREE;
2398 tree top1, top2;
2399 rtx op1a, op2a;
2401 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2402 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2403 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2404 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2405 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2406 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2407 else
2408 gcc_unreachable ();
2410 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2411 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2412 op1a = copy_addr_to_reg (XEXP (op1, 0));
2413 op2a = copy_addr_to_reg (XEXP (op2, 0));
2414 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2415 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2417 /* Make sure not to fold the sincos call again. */
2418 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2419 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2420 call, 3, arg, top1, top2));
2422 else
2424 tree call, fn = NULL_TREE, narg;
2425 tree ctype = build_complex_type (type);
2427 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2428 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2429 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2430 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2431 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2432 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2433 else
2434 gcc_unreachable ();
2436 /* If we don't have a decl for cexp create one. This is the
2437 friendliest fallback if the user calls __builtin_cexpi
2438 on a target without full C99 function support. */
2439 if (fn == NULL_TREE)
2441 tree fntype;
2442 const char *name = NULL;
2444 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2445 name = "cexpf";
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2447 name = "cexp";
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2449 name = "cexpl";
2451 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2452 fn = build_fn_decl (name, fntype);
2455 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2456 build_real (type, dconst0), arg);
2458 /* Make sure not to fold the cexp call again. */
2459 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2460 return expand_expr (build_call_nary (ctype, call, 1, narg),
2461 target, VOIDmode, EXPAND_NORMAL);
2464 /* Now build the proper return type. */
2465 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2466 make_tree (TREE_TYPE (arg), op2),
2467 make_tree (TREE_TYPE (arg), op1)),
2468 target, VOIDmode, EXPAND_NORMAL);
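/* A sketch of the three strategies above for the internal cexpi (x),
   i.e. cos (x) + i*sin (x):
     1. a sincos optab insn computing both parts at once;
     2. a call to the libc sincos function through two temporaries;
     3. a call to cexp on the complex value built from 0 and x.
   In the first two cases the final result is assembled with a
   COMPLEX_EXPR; the cexp fallback already yields a complex value and
   returns directly.  */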
2471 /* Conveniently construct a function call expression. FNDECL names the
2472 function to be called, N is the number of arguments, and the "..."
2473 parameters are the argument expressions. Unlike build_call_expr
2474 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2476 static tree
2477 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2479 va_list ap;
2480 tree fntype = TREE_TYPE (fndecl);
2481 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2483 va_start (ap, n);
2484 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2485 va_end (ap);
2486 SET_EXPR_LOCATION (fn, loc);
2487 return fn;
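/* Example of use (as in the fallback paths further below), where
   fallback_decl stands for whatever fndecl the caller obtained:

       tree call = build_call_nofold_loc (loc, fallback_decl, 1, arg);

   This yields a plain CALL_EXPR for fallback_decl (arg) that will not
   be folded back into the builtin currently being expanded.  */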
2490 /* Expand a call to one of the builtin rounding functions gcc defines
2491 as an extension (lfloor and lceil). As these are gcc extensions we
2492 do not need to worry about setting errno to EDOM.
2493 If expanding via optab fails, lower expression to (int)(floor(x)).
2494 EXP is the expression that is a call to the builtin function;
2495 if convenient, the result should be placed in TARGET. */
2497 static rtx
2498 expand_builtin_int_roundingfn (tree exp, rtx target)
2500 convert_optab builtin_optab;
2501 rtx op0, tmp;
2502 rtx_insn *insns;
2503 tree fndecl = get_callee_fndecl (exp);
2504 enum built_in_function fallback_fn;
2505 tree fallback_fndecl;
2506 machine_mode mode;
2507 tree arg;
2509 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2510 gcc_unreachable ();
2512 arg = CALL_EXPR_ARG (exp, 0);
2514 switch (DECL_FUNCTION_CODE (fndecl))
2516 CASE_FLT_FN (BUILT_IN_ICEIL):
2517 CASE_FLT_FN (BUILT_IN_LCEIL):
2518 CASE_FLT_FN (BUILT_IN_LLCEIL):
2519 builtin_optab = lceil_optab;
2520 fallback_fn = BUILT_IN_CEIL;
2521 break;
2523 CASE_FLT_FN (BUILT_IN_IFLOOR):
2524 CASE_FLT_FN (BUILT_IN_LFLOOR):
2525 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2526 builtin_optab = lfloor_optab;
2527 fallback_fn = BUILT_IN_FLOOR;
2528 break;
2530 default:
2531 gcc_unreachable ();
2534 /* Make a suitable register to place result in. */
2535 mode = TYPE_MODE (TREE_TYPE (exp));
2537 target = gen_reg_rtx (mode);
2539 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2540 need to expand the argument again. This way, we will not perform
2541 side-effects more than once. */
2542 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2544 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2546 start_sequence ();
2548 /* Compute into TARGET. */
2549 if (expand_sfix_optab (target, op0, builtin_optab))
2551 /* Output the entire sequence. */
2552 insns = get_insns ();
2553 end_sequence ();
2554 emit_insn (insns);
2555 return target;
2558 /* If we were unable to expand via the builtin, stop the sequence
2559 (without outputting the insns). */
2560 end_sequence ();
2562 /* Fall back to floating point rounding optab. */
2563 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2565 /* For non-C99 targets we may end up without a fallback fndecl here
2566 if the user called __builtin_lfloor directly. In this case emit
2567 a call to the floor/ceil variants nevertheless. This should result
2568 in the best user experience for targets lacking full C99 support. */
2569 if (fallback_fndecl == NULL_TREE)
2571 tree fntype;
2572 const char *name = NULL;
2574 switch (DECL_FUNCTION_CODE (fndecl))
2576 case BUILT_IN_ICEIL:
2577 case BUILT_IN_LCEIL:
2578 case BUILT_IN_LLCEIL:
2579 name = "ceil";
2580 break;
2581 case BUILT_IN_ICEILF:
2582 case BUILT_IN_LCEILF:
2583 case BUILT_IN_LLCEILF:
2584 name = "ceilf";
2585 break;
2586 case BUILT_IN_ICEILL:
2587 case BUILT_IN_LCEILL:
2588 case BUILT_IN_LLCEILL:
2589 name = "ceill";
2590 break;
2591 case BUILT_IN_IFLOOR:
2592 case BUILT_IN_LFLOOR:
2593 case BUILT_IN_LLFLOOR:
2594 name = "floor";
2595 break;
2596 case BUILT_IN_IFLOORF:
2597 case BUILT_IN_LFLOORF:
2598 case BUILT_IN_LLFLOORF:
2599 name = "floorf";
2600 break;
2601 case BUILT_IN_IFLOORL:
2602 case BUILT_IN_LFLOORL:
2603 case BUILT_IN_LLFLOORL:
2604 name = "floorl";
2605 break;
2606 default:
2607 gcc_unreachable ();
2610 fntype = build_function_type_list (TREE_TYPE (arg),
2611 TREE_TYPE (arg), NULL_TREE);
2612 fallback_fndecl = build_fn_decl (name, fntype);
2615 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2617 tmp = expand_normal (exp);
2618 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2620 /* Truncate the result of floating point optab to integer
2621 via expand_fix (). */
2622 target = gen_reg_rtx (mode);
2623 expand_fix (target, tmp, 0);
2625 return target;
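/* Illustrative example: if the target lacks an lfloor pattern, a call

       long l = __builtin_lfloor (x);

   is lowered as described above to roughly

       long l = (long) floor (x);

   i.e. a call to floor (or floorf/floorl as appropriate) followed by an
   expand_fix conversion of the result to the integer mode.  */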
2628 /* Expand a call to one of the builtin math functions doing integer
2629 conversion (lrint).
2630 Return 0 if a normal call should be emitted rather than expanding the
2631 function in-line. EXP is the expression that is a call to the builtin
2632 function; if convenient, the result should be placed in TARGET. */
2634 static rtx
2635 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2637 convert_optab builtin_optab;
2638 rtx op0;
2639 rtx_insn *insns;
2640 tree fndecl = get_callee_fndecl (exp);
2641 tree arg;
2642 machine_mode mode;
2643 enum built_in_function fallback_fn = BUILT_IN_NONE;
2645 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2646 gcc_unreachable ();
2648 arg = CALL_EXPR_ARG (exp, 0);
2650 switch (DECL_FUNCTION_CODE (fndecl))
2652 CASE_FLT_FN (BUILT_IN_IRINT):
2653 fallback_fn = BUILT_IN_LRINT;
2654 gcc_fallthrough ();
2655 CASE_FLT_FN (BUILT_IN_LRINT):
2656 CASE_FLT_FN (BUILT_IN_LLRINT):
2657 builtin_optab = lrint_optab;
2658 break;
2660 CASE_FLT_FN (BUILT_IN_IROUND):
2661 fallback_fn = BUILT_IN_LROUND;
2662 gcc_fallthrough ();
2663 CASE_FLT_FN (BUILT_IN_LROUND):
2664 CASE_FLT_FN (BUILT_IN_LLROUND):
2665 builtin_optab = lround_optab;
2666 break;
2668 default:
2669 gcc_unreachable ();
2672 /* There's no easy way to detect the case we need to set EDOM. */
2673 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2674 return NULL_RTX;
2676 /* Make a suitable register to place result in. */
2677 mode = TYPE_MODE (TREE_TYPE (exp));
2679 /* There's no easy way to detect the case we need to set EDOM. */
2680 if (!flag_errno_math)
2682 rtx result = gen_reg_rtx (mode);
2684 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2685 need to expand the argument again. This way, we will not perform
2686 side-effects more than once. */
2687 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2689 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2691 start_sequence ();
2693 if (expand_sfix_optab (result, op0, builtin_optab))
2695 /* Output the entire sequence. */
2696 insns = get_insns ();
2697 end_sequence ();
2698 emit_insn (insns);
2699 return result;
2702 /* If we were unable to expand via the builtin, stop the sequence
2703 (without outputting the insns) and call the library function
2704 with the stabilized argument list. */
2705 end_sequence ();
2708 if (fallback_fn != BUILT_IN_NONE)
2710 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2711 targets, (int) round (x) should never be transformed into
2712 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2713 a call to lround in the hope that the target provides at least some
2714 C99 functions. This should result in the best user experience for
2715 targets lacking full C99 support. */
2716 tree fallback_fndecl = mathfn_built_in_1
2717 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2719 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2720 fallback_fndecl, 1, arg);
2722 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2723 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2724 return convert_to_mode (mode, target, 0);
2727 return expand_call (exp, target, target == const0_rtx);
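/* Illustrative example: with -fno-math-errno and a suitable lround
   pattern, a call

       int i = __builtin_iround (x);

   expands to that pattern plus a conversion to int; without the
   pattern it falls back to lround (x), whose result is then converted
   to the mode of the original call as described above.  */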
2730 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2731 a normal call should be emitted rather than expanding the function
2732 in-line. EXP is the expression that is a call to the builtin
2733 function; if convenient, the result should be placed in TARGET. */
2735 static rtx
2736 expand_builtin_powi (tree exp, rtx target)
2738 tree arg0, arg1;
2739 rtx op0, op1;
2740 machine_mode mode;
2741 machine_mode mode2;
2743 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2744 return NULL_RTX;
2746 arg0 = CALL_EXPR_ARG (exp, 0);
2747 arg1 = CALL_EXPR_ARG (exp, 1);
2748 mode = TYPE_MODE (TREE_TYPE (exp));
2750 /* Emit a libcall to libgcc. */
2752 /* Mode of the 2nd argument must match that of an int. */
2753 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2755 if (target == NULL_RTX)
2756 target = gen_reg_rtx (mode);
2758 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2759 if (GET_MODE (op0) != mode)
2760 op0 = convert_to_mode (mode, op0, 0);
2761 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2762 if (GET_MODE (op1) != mode2)
2763 op1 = convert_to_mode (mode2, op1, 0);
2765 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2766 target, LCT_CONST, mode, 2,
2767 op0, mode, op1, mode2);
2769 return target;
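/* Illustrative example: __builtin_powi (x, n) with double X becomes a
   libcall to the powi libfunc for DFmode (the libgcc routine named
   __powidf2 in the default configuration), passing X in DFmode and N in
   the integer mode corresponding to int.  */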
2772 /* Expand expression EXP which is a call to the strlen builtin. Return
2773 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2774 try to get the result in TARGET, if convenient. */
2776 static rtx
2777 expand_builtin_strlen (tree exp, rtx target,
2778 machine_mode target_mode)
2780 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2781 return NULL_RTX;
2782 else
2784 struct expand_operand ops[4];
2785 rtx pat;
2786 tree len;
2787 tree src = CALL_EXPR_ARG (exp, 0);
2788 rtx src_reg;
2789 rtx_insn *before_strlen;
2790 machine_mode insn_mode = target_mode;
2791 enum insn_code icode = CODE_FOR_nothing;
2792 unsigned int align;
2794 /* If the length can be computed at compile-time, return it. */
2795 len = c_strlen (src, 0);
2796 if (len)
2797 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2799 /* If the length can be computed at compile-time and is a constant
2800 integer, but there are side-effects in src, evaluate
2801 src for side-effects, then return len.
2802 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2803 can be optimized into: i++; x = 3; */
2804 len = c_strlen (src, 1);
2805 if (len && TREE_CODE (len) == INTEGER_CST)
2807 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2808 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2811 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2813 /* If SRC is not a pointer type, don't do this operation inline. */
2814 if (align == 0)
2815 return NULL_RTX;
2817 /* Bail out if we can't compute strlen in the right mode. */
2818 while (insn_mode != VOIDmode)
2820 icode = optab_handler (strlen_optab, insn_mode);
2821 if (icode != CODE_FOR_nothing)
2822 break;
2824 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2826 if (insn_mode == VOIDmode)
2827 return NULL_RTX;
2829 /* Make a place to hold the source address. We will not expand
2830 the actual source until we are sure that the expansion will
2831 not fail -- there are trees that cannot be expanded twice. */
2832 src_reg = gen_reg_rtx (Pmode);
2834 /* Mark the beginning of the strlen sequence so we can emit the
2835 source operand later. */
2836 before_strlen = get_last_insn ();
2838 create_output_operand (&ops[0], target, insn_mode);
2839 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2840 create_integer_operand (&ops[2], 0);
2841 create_integer_operand (&ops[3], align);
2842 if (!maybe_expand_insn (icode, 4, ops))
2843 return NULL_RTX;
2845 /* Now that we are assured of success, expand the source. */
2846 start_sequence ();
2847 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2848 if (pat != src_reg)
2850 #ifdef POINTERS_EXTEND_UNSIGNED
2851 if (GET_MODE (pat) != Pmode)
2852 pat = convert_to_mode (Pmode, pat,
2853 POINTERS_EXTEND_UNSIGNED);
2854 #endif
2855 emit_move_insn (src_reg, pat);
2857 pat = get_insns ();
2858 end_sequence ();
2860 if (before_strlen)
2861 emit_insn_after (pat, before_strlen);
2862 else
2863 emit_insn_before (pat, get_insns ());
2865 /* Return the value in the proper mode for this function. */
2866 if (GET_MODE (ops[0].value) == target_mode)
2867 target = ops[0].value;
2868 else if (target != 0)
2869 convert_move (target, ops[0].value, 0);
2870 else
2871 target = convert_to_mode (target_mode, ops[0].value, 0);
2873 return target;
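/* Illustrative examples of the cases above:

       n = __builtin_strlen ("hello");   is folded to the constant 5

   and, as in the comment above, a known constant length with side
   effects in the argument keeps the side effects while still using the
   constant.  Only when no constant length is known is the target's
   strlen pattern (strlen_optab) tried; failing that, a normal library
   call is emitted.  */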
2877 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2878 bytes from the constant string DATA + OFFSET and return it as a target
2879 constant. */
2881 static rtx
2882 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2883 machine_mode mode)
2885 const char *str = (const char *) data;
2887 gcc_assert (offset >= 0
2888 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2889 <= strlen (str) + 1));
2891 return c_readstr (str + offset, mode);
2894 /* LEN specifies the length of the block for the memcpy/memset operation.
2895 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2896 In some cases we can make a very likely guess about the maximum size, which
2897 we then put into PROBABLE_MAX_SIZE. */
2899 static void
2900 determine_block_size (tree len, rtx len_rtx,
2901 unsigned HOST_WIDE_INT *min_size,
2902 unsigned HOST_WIDE_INT *max_size,
2903 unsigned HOST_WIDE_INT *probable_max_size)
2905 if (CONST_INT_P (len_rtx))
2907 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2908 return;
2910 else
2912 wide_int min, max;
2913 enum value_range_type range_type = VR_UNDEFINED;
2915 /* Determine bounds from the type. */
2916 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2917 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2918 else
2919 *min_size = 0;
2920 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2921 *probable_max_size = *max_size
2922 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2923 else
2924 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2926 if (TREE_CODE (len) == SSA_NAME)
2927 range_type = get_range_info (len, &min, &max);
2928 if (range_type == VR_RANGE)
2930 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2931 *min_size = min.to_uhwi ();
2932 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2933 *probable_max_size = *max_size = max.to_uhwi ();
2935 else if (range_type == VR_ANTI_RANGE)
2937 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
2938 if (min == 0)
2940 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2941 *min_size = max.to_uhwi () + 1;
2943 /* Code like
2945 int n;
2946 if (n < 100)
2947 memcpy (a, b, n)
2949 produces an anti-range allowing negative values of N. We can still
2950 use the information and guess that N is not negative.
2952 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2953 *probable_max_size = min.to_uhwi () - 1;
2956 gcc_checking_assert (*max_size <=
2957 (unsigned HOST_WIDE_INT)
2958 GET_MODE_MASK (GET_MODE (len_rtx)));
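/* A small worked example (values assumed for illustration): for

       memcpy (a, b, n);

   where value-range analysis proves 1 <= n <= 100, this sets *min_size
   to 1 and *max_size = *probable_max_size to 100.  For the anti-range
   produced by code like "if (n < 100) memcpy (a, b, n);" with signed n,
   only PROBABLE_MAX_SIZE is typically tightened (here to 99), while
   MAX_SIZE stays at the mode mask.  */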
2961 /* Helper function to do the actual work for expand_builtin_memcpy. */
2963 static rtx
2964 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2966 const char *src_str;
2967 unsigned int src_align = get_pointer_alignment (src);
2968 unsigned int dest_align = get_pointer_alignment (dest);
2969 rtx dest_mem, src_mem, dest_addr, len_rtx;
2970 HOST_WIDE_INT expected_size = -1;
2971 unsigned int expected_align = 0;
2972 unsigned HOST_WIDE_INT min_size;
2973 unsigned HOST_WIDE_INT max_size;
2974 unsigned HOST_WIDE_INT probable_max_size;
2976 /* If DEST is not a pointer type, call the normal function. */
2977 if (dest_align == 0)
2978 return NULL_RTX;
2980 /* If SRC is not a pointer type, don't do this
2981 operation in-line. */
2982 if (src_align == 0)
2983 return NULL_RTX;
2985 if (currently_expanding_gimple_stmt)
2986 stringop_block_profile (currently_expanding_gimple_stmt,
2987 &expected_align, &expected_size);
2989 if (expected_align < dest_align)
2990 expected_align = dest_align;
2991 dest_mem = get_memory_rtx (dest, len);
2992 set_mem_align (dest_mem, dest_align);
2993 len_rtx = expand_normal (len);
2994 determine_block_size (len, len_rtx, &min_size, &max_size,
2995 &probable_max_size);
2996 src_str = c_getstr (src);
2998 /* If SRC is a string constant and block move would be done
2999 by pieces, we can avoid loading the string from memory
3000 and store only the computed constants. */
3001 if (src_str
3002 && CONST_INT_P (len_rtx)
3003 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3004 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3005 CONST_CAST (char *, src_str),
3006 dest_align, false))
3008 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3009 builtin_memcpy_read_str,
3010 CONST_CAST (char *, src_str),
3011 dest_align, false, 0);
3012 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3013 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3014 return dest_mem;
3017 src_mem = get_memory_rtx (src, len);
3018 set_mem_align (src_mem, src_align);
3020 /* Copy word part most expediently. */
3021 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3022 CALL_EXPR_TAILCALL (exp)
3023 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3024 expected_align, expected_size,
3025 min_size, max_size, probable_max_size);
3027 if (dest_addr == 0)
3029 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3030 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3033 return dest_addr;
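/* Illustrative example: for

       memcpy (buf, "hi", 3);

   the length is constant and the source is a string constant, so when
   can_store_by_pieces agrees the copy is emitted as immediate stores of
   the string bytes via store_by_pieces rather than as a block move or a
   library call.  */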
3036 /* Try to verify that the sizes and lengths of the arguments to a string
3037 manipulation function given by EXP are within valid bounds and that
3038 the operation does not lead to buffer overflow. Arguments other than
3039 EXP may be null. When non-null, the arguments have the following
3040 meaning:
3041 SIZE is the user-supplied size argument to the function (such as in
3042 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3043 number of bytes to write.
3044 MAXLEN is the user-supplied bound on the length of the source sequence
3045 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3046 of bytes to write.
3047 STR is the source string (such as in strcpy(d, s)) when the expression
3048 EXP is a string function call (as opposed to a memory call like memcpy).
3049 As an exception, STR can also be an integer denoting the precomputed
3050 length of the source string.
3051 OBJSIZE is the size of the destination object specified by the last
3052 argument to the _chk builtins, typically resulting from the expansion
3053 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3054 OBJSIZE)).
3056 When SIZE is null LEN is checked to verify that it doesn't exceed
3057 SIZE_MAX.
3059 If the call is successfully verified as safe from buffer overflow
3060 the function returns true, otherwise false. */
3062 static bool
3063 check_sizes (int opt, tree exp, tree size, tree maxlen, tree str, tree objsize)
3065 /* The size of the largest object is half the address space, or
3066 SSIZE_MAX. (This is way too permissive.) */
3067 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3069 tree slen = NULL_TREE;
3071 /* Set to true when the exact number of bytes written by a string
3072 function like strcpy is not known and the only thing that is
3073 known is that it must be at least one (for the terminating nul). */
3074 bool at_least_one = false;
3075 if (str)
3077 /* STR is normally a pointer to string but as a special case
3078 it can be an integer denoting the length of a string. */
3079 if (POINTER_TYPE_P (TREE_TYPE (str)))
3081 /* Try to determine the range of lengths the source string
3082 refers to. If it can be determined add one to it for
3083 the terminating nul. Otherwise, set it to one for
3084 the same reason. */
3085 tree lenrange[2];
3086 get_range_strlen (str, lenrange);
3087 if (lenrange[0])
3088 slen = fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3089 size_one_node);
3090 else
3092 at_least_one = true;
3093 slen = size_one_node;
3096 else
3097 slen = str;
3100 if (!size && !maxlen)
3102 /* When the only available piece of data is the object size
3103 there is nothing to do. */
3104 if (!slen)
3105 return true;
3107 /* Otherwise, when the length of the source sequence is known
3108 (as with strlen), set SIZE to it. */
3109 size = slen;
3112 if (!objsize)
3113 objsize = maxobjsize;
3115 /* The SIZE is exact if it's non-null, constant, and in range of
3116 unsigned HOST_WIDE_INT. */
3117 bool exactsize = size && tree_fits_uhwi_p (size);
3119 tree range[2] = { NULL_TREE, NULL_TREE };
3120 if (size)
3121 get_size_range (size, range);
3123 /* First check the number of bytes to be written against the maximum
3124 object size. */
3125 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3127 location_t loc = tree_nonartificial_location (exp);
3129 if (range[0] == range[1])
3130 warning_at (loc, opt,
3131 "%K%qD: specified size %wu "
3132 "exceeds maximum object size %wu",
3133 exp, get_callee_fndecl (exp),
3134 tree_to_uhwi (range[0]),
3135 tree_to_uhwi (maxobjsize));
3136 else
3137 warning_at (loc, opt,
3138 "%K%qD: specified size between %wu and %wu "
3139 "exceeds maximum object size %wu",
3140 exp, get_callee_fndecl (exp),
3141 tree_to_uhwi (range[0]),
3142 tree_to_uhwi (range[1]),
3143 tree_to_uhwi (maxobjsize));
3144 return false;
3147 /* Next check the number of bytes to be written against the destination
3148 object size. */
3149 if (range[0] || !exactsize || integer_all_onesp (size))
3151 if (range[0]
3152 && ((tree_fits_uhwi_p (objsize)
3153 && tree_int_cst_lt (objsize, range[0]))
3154 || (tree_fits_uhwi_p (size)
3155 && tree_int_cst_lt (size, range[0]))))
3157 unsigned HOST_WIDE_INT uwir0 = tree_to_uhwi (range[0]);
3159 location_t loc = tree_nonartificial_location (exp);
3161 if (at_least_one)
3162 warning_at (loc, opt,
3163 "%K%qD: writing at least %wu byte into a region "
3164 "of size %wu overflows the destination",
3165 exp, get_callee_fndecl (exp), uwir0,
3166 tree_to_uhwi (objsize));
3167 else if (range[0] == range[1])
3168 warning_at (loc, opt,
3169 (uwir0 == 1
3170 ? G_("%K%qD: writing %wu byte into a region "
3171 "of size %wu overflows the destination")
3172 : G_("%K%qD: writing %wu bytes into a region "
3173 "of size %wu overflows the destination")),
3174 exp, get_callee_fndecl (exp), uwir0,
3175 tree_to_uhwi (objsize));
3176 else
3177 warning_at (loc, opt,
3178 "%K%qD: writing between %wu and %wu bytes "
3179 "into a region of size %wu overflows "
3180 "the destination",
3181 exp, get_callee_fndecl (exp), uwir0,
3182 tree_to_uhwi (range[1]), tree_to_uhwi (objsize));
3184 /* Return error when an overflow has been detected. */
3185 return false;
3189 /* Check the maximum length of the source sequence against the size
3190 of the destination object if known, or against the maximum size
3191 of an object. */
3192 if (maxlen)
3194 get_size_range (maxlen, range);
3196 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3198 location_t loc = tree_nonartificial_location (exp);
3200 if (tree_int_cst_lt (maxobjsize, range[0]))
3202 /* Warn about crazy big sizes first since that's more
3203 likely to be meaningful than saying that the bound
3204 is greater than the object size if both are big. */
3205 if (range[0] == range[1])
3206 warning_at (loc, opt,
3207 "%K%qD: specified bound %wu "
3208 "exceeds maximum object size %wu",
3209 exp, get_callee_fndecl (exp),
3210 tree_to_uhwi (range[0]),
3211 tree_to_uhwi (maxobjsize));
3212 else
3213 warning_at (loc, opt,
3214 "%K%qD: specified bound between %wu and %wu "
3215 " exceeds maximum object size %wu",
3216 exp, get_callee_fndecl (exp),
3217 tree_to_uhwi (range[0]),
3218 tree_to_uhwi (range[1]),
3219 tree_to_uhwi (maxobjsize));
3221 return false;
3224 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3226 if (range[0] == range[1])
3227 warning_at (loc, opt,
3228 "%K%qD: specified bound %wu "
3229 "exceeds the size %wu of the destination",
3230 exp, get_callee_fndecl (exp),
3231 tree_to_uhwi (range[0]),
3232 tree_to_uhwi (objsize));
3233 else
3234 warning_at (loc, opt,
3235 "%K%qD: specified bound between %wu and %wu "
3236 " exceeds the size %wu of the destination",
3237 exp, get_callee_fndecl (exp),
3238 tree_to_uhwi (range[0]),
3239 tree_to_uhwi (range[1]),
3240 tree_to_uhwi (objsize));
3241 return false;
3246 return true;
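/* Illustrative example of the checks above (sizes assumed for the
   sketch):

       char d[3];
       memcpy (d, s, 5);

   The constant size 5 exceeds the destination size 3 computed via
   __builtin_object_size, so a -Wstringop-overflow= warning along the
   lines of "writing 5 bytes into a region of size 3 overflows the
   destination" is issued and false is returned.  */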
3249 /* Helper to compute the size of the object referenced by the DEST
3250 expression, which must be of pointer type, using Object Size type
3251 OSTYPE (only the least significant 2 bits are used). Return
3252 the size of the object if successful or NULL when the size cannot
3253 be determined. */
3255 static inline tree
3256 compute_dest_size (tree dest, int ostype)
3258 unsigned HOST_WIDE_INT size;
3259 if (compute_builtin_object_size (dest, ostype & 3, &size))
3260 return build_int_cst (sizetype, size);
3262 return NULL_TREE;
3265 /* Helper to determine and check the sizes of the source and the destination
3266 of calls to __builtin_{bzero,memcpy,memset}. Use Object Size type-0
3267 regardless of the OPT_Wstringop_overflow_ setting. Returns true on success
3268 (no overflow or invalid sizes), false otherwise. */
3270 static bool
3271 check_memop_sizes (tree exp, tree dest, tree size)
3273 if (!warn_stringop_overflow)
3274 return true;
3276 /* For functions like memset and memcpy that operate on raw memory
3277 try to determine the size of the largest destination object using
3278 type-0 Object Size regardless of the object size type specified
3279 by the option. */
3280 tree objsize = compute_dest_size (dest, 0);
3282 return check_sizes (OPT_Wstringop_overflow_, exp,
3283 size, /*maxlen=*/NULL_TREE, /*str=*/NULL_TREE, objsize);
3286 /* Expand a call EXP to the memcpy builtin.
3287 Return NULL_RTX if we failed; the caller should emit a normal call,
3288 otherwise try to get the result in TARGET, if convenient (and in
3289 mode MODE if that's convenient). */
3291 static rtx
3292 expand_builtin_memcpy (tree exp, rtx target)
3294 if (!validate_arglist (exp,
3295 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3296 return NULL_RTX;
3298 tree dest = CALL_EXPR_ARG (exp, 0);
3299 tree src = CALL_EXPR_ARG (exp, 1);
3300 tree len = CALL_EXPR_ARG (exp, 2);
3302 check_memop_sizes (exp, dest, len);
3304 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3307 /* Expand an instrumented call EXP to the memcpy builtin.
3308 Return NULL_RTX if we failed; the caller should emit a normal call,
3309 otherwise try to get the result in TARGET, if convenient (and in
3310 mode MODE if that's convenient). */
3312 static rtx
3313 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3315 if (!validate_arglist (exp,
3316 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3317 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3318 INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3320 else
3322 tree dest = CALL_EXPR_ARG (exp, 0);
3323 tree src = CALL_EXPR_ARG (exp, 2);
3324 tree len = CALL_EXPR_ARG (exp, 4);
3325 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3327 /* Return src bounds with the result. */
3328 if (res)
3330 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3331 expand_normal (CALL_EXPR_ARG (exp, 1)));
3332 res = chkp_join_splitted_slot (res, bnd);
3334 return res;
3338 /* Expand a call EXP to the mempcpy builtin.
3339 Return NULL_RTX if we failed; the caller should emit a normal call,
3340 otherwise try to get the result in TARGET, if convenient (and in
3341 mode MODE if that's convenient). If ENDP is 0 return the
3342 destination pointer, if ENDP is 1 return the end pointer ala
3343 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3344 stpcpy. */
3346 static rtx
3347 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3349 if (!validate_arglist (exp,
3350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
3353 tree dest = CALL_EXPR_ARG (exp, 0);
3354 tree src = CALL_EXPR_ARG (exp, 1);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3357 /* Avoid expanding mempcpy into memcpy when the call is determined
3358 to overflow the buffer. This also prevents the same overflow
3359 from being diagnosed again when expanding memcpy. */
3360 if (!check_memop_sizes (exp, dest, len))
3361 return NULL_RTX;
3363 return expand_builtin_mempcpy_args (dest, src, len,
3364 target, mode, /*endp=*/ 1,
3365 exp);
3368 /* Expand an instrumented call EXP to the mempcpy builtin.
3369 Return NULL_RTX if we failed; the caller should emit a normal call,
3370 otherwise try to get the result in TARGET, if convenient (and in
3371 mode MODE if that's convenient). */
3373 static rtx
3374 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3376 if (!validate_arglist (exp,
3377 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3378 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3379 INTEGER_TYPE, VOID_TYPE))
3380 return NULL_RTX;
3381 else
3383 tree dest = CALL_EXPR_ARG (exp, 0);
3384 tree src = CALL_EXPR_ARG (exp, 2);
3385 tree len = CALL_EXPR_ARG (exp, 4);
3386 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3387 mode, 1, exp);
3389 /* Return src bounds with the result. */
3390 if (res)
3392 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3393 expand_normal (CALL_EXPR_ARG (exp, 1)));
3394 res = chkp_join_splitted_slot (res, bnd);
3396 return res;
3400 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3401 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3402 so that this can also be called without constructing an actual CALL_EXPR.
3403 The other arguments and return value are the same as for
3404 expand_builtin_mempcpy. */
3406 static rtx
3407 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3408 rtx target, machine_mode mode, int endp,
3409 tree orig_exp)
3411 tree fndecl = get_callee_fndecl (orig_exp);
3413 /* If return value is ignored, transform mempcpy into memcpy. */
3414 if (target == const0_rtx
3415 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3416 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3418 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3419 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3420 dest, src, len);
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3423 else if (target == const0_rtx
3424 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3426 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3427 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3428 dest, src, len);
3429 return expand_expr (result, target, mode, EXPAND_NORMAL);
3431 else
3433 const char *src_str;
3434 unsigned int src_align = get_pointer_alignment (src);
3435 unsigned int dest_align = get_pointer_alignment (dest);
3436 rtx dest_mem, src_mem, len_rtx;
3438 /* If either SRC or DEST is not a pointer type, don't do this
3439 operation in-line. */
3440 if (dest_align == 0 || src_align == 0)
3441 return NULL_RTX;
3443 /* If LEN is not constant, call the normal function. */
3444 if (! tree_fits_uhwi_p (len))
3445 return NULL_RTX;
3447 len_rtx = expand_normal (len);
3448 src_str = c_getstr (src);
3450 /* If SRC is a string constant and block move would be done
3451 by pieces, we can avoid loading the string from memory
3452 and store only the computed constants. */
3453 if (src_str
3454 && CONST_INT_P (len_rtx)
3455 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3456 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3457 CONST_CAST (char *, src_str),
3458 dest_align, false))
3460 dest_mem = get_memory_rtx (dest, len);
3461 set_mem_align (dest_mem, dest_align);
3462 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3463 builtin_memcpy_read_str,
3464 CONST_CAST (char *, src_str),
3465 dest_align, false, endp);
3466 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3467 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3468 return dest_mem;
3471 if (CONST_INT_P (len_rtx)
3472 && can_move_by_pieces (INTVAL (len_rtx),
3473 MIN (dest_align, src_align)))
3475 dest_mem = get_memory_rtx (dest, len);
3476 set_mem_align (dest_mem, dest_align);
3477 src_mem = get_memory_rtx (src, len);
3478 set_mem_align (src_mem, src_align);
3479 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3480 MIN (dest_align, src_align), endp);
3481 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3482 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3483 return dest_mem;
3486 return NULL_RTX;
3490 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3491 we failed; the caller should emit a normal call. Otherwise try to
3492 get the result in TARGET, if convenient. If ENDP is 0 return the
3493 destination pointer, if ENDP is 1 return the end pointer ala
3494 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3495 stpcpy. */
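/* Illustrative sketch, not part of the original sources: the ENDP values
   mirror the return conventions of the copy builtins routed through this
   helper. For the four-byte string "abc" (three characters plus NUL):

     strcpy (d, "abc")        ENDP == 0   returns d
     mempcpy (d, "abc", 4)    ENDP == 1   returns d + 4 (past the NUL)
     stpcpy (d, "abc")        ENDP == 2   returns d + 3 (at the NUL)  */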
3497 static rtx
3498 expand_movstr (tree dest, tree src, rtx target, int endp)
3500 struct expand_operand ops[3];
3501 rtx dest_mem;
3502 rtx src_mem;
3504 if (!targetm.have_movstr ())
3505 return NULL_RTX;
3507 dest_mem = get_memory_rtx (dest, NULL);
3508 src_mem = get_memory_rtx (src, NULL);
3509 if (!endp)
3511 target = force_reg (Pmode, XEXP (dest_mem, 0));
3512 dest_mem = replace_equiv_address (dest_mem, target);
3515 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3516 create_fixed_operand (&ops[1], dest_mem);
3517 create_fixed_operand (&ops[2], src_mem);
3518 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3519 return NULL_RTX;
3521 if (endp && target != const0_rtx)
3523 target = ops[0].value;
3524 /* movstr is supposed to set end to the address of the NUL
3525 terminator. If the caller requested a mempcpy-like return value,
3526 adjust it. */
3527 if (endp == 1)
3529 rtx tem = plus_constant (GET_MODE (target),
3530 gen_lowpart (GET_MODE (target), target), 1);
3531 emit_move_insn (target, force_operand (tem, NULL_RTX));
3534 return target;
3537 /* Do some very basic size validation of a call to the strcat builtin
3538 given by EXP. Return NULL_RTX to have the built-in expand to a call
3539 to the library function. */
3541 static rtx
3542 expand_builtin_strcat (tree exp, rtx)
3544 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3545 || !warn_stringop_overflow)
3546 return NULL_RTX;
3548 tree dest = CALL_EXPR_ARG (exp, 0);
3549 tree src = CALL_EXPR_ARG (exp, 1);
3551 /* There is no way here to determine the length of the string in
3552 the destination to which the SRC string is being appended, so
3553 just diagnose cases when the source string is longer than
3554 the destination object. */
3556 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3558 check_sizes (OPT_Wstringop_overflow_,
3559 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3561 return NULL_RTX;
3564 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3565 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3566 try to get the result in TARGET, if convenient (and in mode MODE if that's
3567 convenient). */
3569 static rtx
3570 expand_builtin_strcpy (tree exp, rtx target)
3572 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3575 tree dest = CALL_EXPR_ARG (exp, 0);
3576 tree src = CALL_EXPR_ARG (exp, 1);
3578 if (warn_stringop_overflow)
3580 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3581 check_sizes (OPT_Wstringop_overflow_,
3582 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3585 return expand_builtin_strcpy_args (dest, src, target);
3588 /* Helper function to do the actual work for expand_builtin_strcpy. The
3589 arguments to the builtin_strcpy call DEST and SRC are broken out
3590 so that this can also be called without constructing an actual CALL_EXPR.
3591 The other arguments and return value are the same as for
3592 expand_builtin_strcpy. */
3594 static rtx
3595 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3597 return expand_movstr (dest, src, target, /*endp=*/0);
3600 /* Expand a call EXP to the stpcpy builtin.
3601 Return NULL_RTX if we failed; the caller should emit a normal call.
3602 Otherwise try to get the result in TARGET, if convenient (and in
3603 mode MODE if that's convenient). */
3605 static rtx
3606 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3608 tree dst, src;
3609 location_t loc = EXPR_LOCATION (exp);
3611 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3612 return NULL_RTX;
3614 dst = CALL_EXPR_ARG (exp, 0);
3615 src = CALL_EXPR_ARG (exp, 1);
3617 /* If return value is ignored, transform stpcpy into strcpy. */
3618 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3620 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3621 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3622 return expand_expr (result, target, mode, EXPAND_NORMAL);
3624 else
3626 tree len, lenp1;
3627 rtx ret;
3629 /* Ensure we get an actual string whose length can be evaluated at
3630 compile-time, not an expression containing a string. This is
3631 because the latter will potentially produce pessimized code
3632 when used to produce the return value. */
3633 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3634 return expand_movstr (dst, src, target, /*endp=*/2);
3636 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3637 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3638 target, mode, /*endp=*/2,
3639 exp);
3641 if (ret)
3642 return ret;
3644 if (TREE_CODE (len) == INTEGER_CST)
3646 rtx len_rtx = expand_normal (len);
3648 if (CONST_INT_P (len_rtx))
3650 ret = expand_builtin_strcpy_args (dst, src, target);
3652 if (ret)
3654 if (! target)
3656 if (mode != VOIDmode)
3657 target = gen_reg_rtx (mode);
3658 else
3659 target = gen_reg_rtx (GET_MODE (ret));
3661 if (GET_MODE (target) != GET_MODE (ret))
3662 ret = gen_lowpart (GET_MODE (target), ret);
3664 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3665 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3666 gcc_assert (ret);
3668 return target;
3673 return expand_movstr (dst, src, target, /*endp=*/2);
3677 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3678 bytes from constant string DATA + OFFSET and return it as target
3679 constant. */
3682 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3683 machine_mode mode)
3685 const char *str = (const char *) data;
3687 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3688 return const0_rtx;
3690 return c_readstr (str + offset, mode);
3693 /* Helper to check the sizes of sequences and the destination of calls
3694 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3695 success (no overflow or invalid sizes), false otherwise. */
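/* Illustrative sketch, not part of the original sources: the diagnostic
   below fires for the common misuse of passing the destination size as
   the bound, e.g.

     char d[8];
     strncat (d, s, sizeof d);   // diagnosed: bound equals size of d

   Since strncat always appends a terminating NUL, a safe bound must leave
   room for it and for the existing contents:

     strncat (d, s, sizeof d - strlen (d) - 1);  */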
3697 static bool
3698 check_strncat_sizes (tree exp, tree objsize)
3700 tree dest = CALL_EXPR_ARG (exp, 0);
3701 tree src = CALL_EXPR_ARG (exp, 1);
3702 tree maxlen = CALL_EXPR_ARG (exp, 2);
3704 /* Try to determine the range of lengths that the source expression
3705 refers to. */
3706 tree lenrange[2];
3707 get_range_strlen (src, lenrange);
3709 /* Try to verify that the destination is big enough for the shortest
3710 string. */
3712 if (!objsize && warn_stringop_overflow)
3714 /* If it hasn't been provided by __strncat_chk, try to determine
3715 the size of the destination object into which the source is
3716 being copied. */
3717 objsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3720 /* Add one for the terminating nul. */
3721 tree srclen = (lenrange[0]
3722 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3723 size_one_node)
3724 : NULL_TREE);
3726 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3727 nul, so the specified upper bound should never be equal to (or greater
3728 than) the size of the destination. */
3729 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3730 && tree_int_cst_equal (objsize, maxlen))
3732 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3733 "specified bound %wu "
3734 "equals the size of the destination",
3735 tree_to_uhwi (maxlen));
3737 return false;
3740 if (!srclen
3741 || (maxlen && tree_fits_uhwi_p (maxlen)
3742 && tree_fits_uhwi_p (srclen)
3743 && tree_int_cst_lt (maxlen, srclen)))
3744 srclen = maxlen;
3746 /* The number of bytes to write is LEN but check_sizes will also
3747 check SRCLEN if LEN's value isn't known. */
3748 return check_sizes (OPT_Wstringop_overflow_,
3749 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3752 /* Similar to expand_builtin_strcat, do some very basic size validation
3753 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3754 the built-in expand to a call to the library function. */
3756 static rtx
3757 expand_builtin_strncat (tree exp, rtx)
3759 if (!validate_arglist (exp,
3760 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3761 || !warn_stringop_overflow)
3762 return NULL_RTX;
3764 tree dest = CALL_EXPR_ARG (exp, 0);
3765 tree src = CALL_EXPR_ARG (exp, 1);
3766 /* The upper bound on the number of bytes to write. */
3767 tree maxlen = CALL_EXPR_ARG (exp, 2);
3768 /* The length of the source sequence. */
3769 tree slen = c_strlen (src, 1);
3771 /* Try to determine the range of lengths that the source expression
3772 refers to. */
3773 tree lenrange[2];
3774 if (slen)
3775 lenrange[0] = lenrange[1] = slen;
3776 else
3777 get_range_strlen (src, lenrange);
3779 /* Try to verify that the destination is big enough for the shortest
3780 string. First try to determine the size of the destination object
3781 into which the source is being copied. */
3782 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3784 /* Add one for the terminating nul. */
3785 tree srclen = (lenrange[0]
3786 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3787 size_one_node)
3788 : NULL_TREE);
3790 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3791 nul, so the specified upper bound should never be equal to (or greater
3792 than) the size of the destination. */
3793 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3794 && tree_int_cst_equal (destsize, maxlen))
3796 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3797 "specified bound %wu "
3798 "equals the size of the destination",
3799 tree_to_uhwi (maxlen));
3801 return NULL_RTX;
3804 if (!srclen
3805 || (maxlen && tree_fits_uhwi_p (maxlen)
3806 && tree_fits_uhwi_p (srclen)
3807 && tree_int_cst_lt (maxlen, srclen)))
3808 srclen = maxlen;
3810 /* The number of bytes to write is LEN but check_sizes will also
3811 check SRCLEN if LEN's value isn't known. */
3812 check_sizes (OPT_Wstringop_overflow_,
3813 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3815 return NULL_RTX;
3818 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3819 NULL_RTX if we failed; the caller should emit a normal call. */
3821 static rtx
3822 expand_builtin_strncpy (tree exp, rtx target)
3824 location_t loc = EXPR_LOCATION (exp);
3826 if (validate_arglist (exp,
3827 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
3831 /* The number of bytes to write (not the maximum). */
3832 tree len = CALL_EXPR_ARG (exp, 2);
3833 /* The length of the source sequence. */
3834 tree slen = c_strlen (src, 1);
3836 if (warn_stringop_overflow)
3838 /* Try to determine the range of lengths that the source expression
3839 refers to. */
3840 tree lenrange[2];
3841 if (slen)
3842 lenrange[0] = lenrange[1] = slen;
3843 else
3845 get_range_strlen (src, lenrange);
3846 slen = lenrange[0];
3849 tree destsize = compute_dest_size (dest,
3850 warn_stringop_overflow - 1);
3852 /* The number of bytes to write is LEN but check_sizes will also
3853 check SLEN if LEN's value isn't known. */
3854 check_sizes (OPT_Wstringop_overflow_,
3855 exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3858 /* We must be passed a constant len and src parameter. */
3859 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3860 return NULL_RTX;
3862 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3864 /* We're required to pad with trailing zeros if the requested
3865 len is greater than strlen(s2)+1. In that case try to
3866 use store_by_pieces; if it fails, punt. */
3867 if (tree_int_cst_lt (slen, len))
3869 unsigned int dest_align = get_pointer_alignment (dest);
3870 const char *p = c_getstr (src);
3871 rtx dest_mem;
3873 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3874 || !can_store_by_pieces (tree_to_uhwi (len),
3875 builtin_strncpy_read_str,
3876 CONST_CAST (char *, p),
3877 dest_align, false))
3878 return NULL_RTX;
3880 dest_mem = get_memory_rtx (dest, len);
3881 store_by_pieces (dest_mem, tree_to_uhwi (len),
3882 builtin_strncpy_read_str,
3883 CONST_CAST (char *, p), dest_align, false, 0);
3884 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3885 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3886 return dest_mem;
3889 return NULL_RTX;
3892 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3893 bytes from constant string DATA + OFFSET and return it as target
3894 constant. */
3897 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3898 machine_mode mode)
3900 const char *c = (const char *) data;
3901 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3903 memset (p, *c, GET_MODE_SIZE (mode));
3905 return c_readstr (p, mode);
3908 /* Callback routine for store_by_pieces. Return the RTL of a register
3909 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3910 char value given in the RTL register data. For example, if mode is
3911 4 bytes wide, return the RTL for 0x01010101*data. */
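/* Worked example of the above (illustrative only): for a 4-byte mode the
   coefficient read back from the buffer of 0x01 bytes is 0x01010101, so a
   runtime byte value of 0xab yields

     0xab * 0x01010101 == 0xabababab

   i.e. the byte replicated into every byte of the word.  */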
3913 static rtx
3914 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3915 machine_mode mode)
3917 rtx target, coeff;
3918 size_t size;
3919 char *p;
3921 size = GET_MODE_SIZE (mode);
3922 if (size == 1)
3923 return (rtx) data;
3925 p = XALLOCAVEC (char, size);
3926 memset (p, 1, size);
3927 coeff = c_readstr (p, mode);
3929 target = convert_to_mode (mode, (rtx) data, 1);
3930 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3931 return force_reg (mode, target);
3934 /* Expand expression EXP, which is a call to the memset builtin. Return
3935 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3936 try to get the result in TARGET, if convenient (and in mode MODE if that's
3937 convenient). */
3939 static rtx
3940 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
3946 tree dest = CALL_EXPR_ARG (exp, 0);
3947 tree val = CALL_EXPR_ARG (exp, 1);
3948 tree len = CALL_EXPR_ARG (exp, 2);
3950 check_memop_sizes (exp, dest, len);
3952 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3955 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3956 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3957 try to get the result in TARGET, if convenient (and in mode MODE if that's
3958 convenient). */
3960 static rtx
3961 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3963 if (!validate_arglist (exp,
3964 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3965 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3966 return NULL_RTX;
3967 else
3969 tree dest = CALL_EXPR_ARG (exp, 0);
3970 tree val = CALL_EXPR_ARG (exp, 2);
3971 tree len = CALL_EXPR_ARG (exp, 3);
3972 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3974 /* Return src bounds with the result. */
3975 if (res)
3977 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3978 expand_normal (CALL_EXPR_ARG (exp, 1)));
3979 res = chkp_join_splitted_slot (res, bnd);
3981 return res;
3985 /* Helper function to do the actual work for expand_builtin_memset. The
3986 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3987 so that this can also be called without constructing an actual CALL_EXPR.
3988 The other arguments and return value are the same as for
3989 expand_builtin_memset. */
3991 static rtx
3992 expand_builtin_memset_args (tree dest, tree val, tree len,
3993 rtx target, machine_mode mode, tree orig_exp)
3995 tree fndecl, fn;
3996 enum built_in_function fcode;
3997 machine_mode val_mode;
3998 char c;
3999 unsigned int dest_align;
4000 rtx dest_mem, dest_addr, len_rtx;
4001 HOST_WIDE_INT expected_size = -1;
4002 unsigned int expected_align = 0;
4003 unsigned HOST_WIDE_INT min_size;
4004 unsigned HOST_WIDE_INT max_size;
4005 unsigned HOST_WIDE_INT probable_max_size;
4007 dest_align = get_pointer_alignment (dest);
4009 /* If DEST is not a pointer type, don't do this operation in-line. */
4010 if (dest_align == 0)
4011 return NULL_RTX;
4013 if (currently_expanding_gimple_stmt)
4014 stringop_block_profile (currently_expanding_gimple_stmt,
4015 &expected_align, &expected_size);
4017 if (expected_align < dest_align)
4018 expected_align = dest_align;
4020 /* If the LEN parameter is zero, return DEST. */
4021 if (integer_zerop (len))
4023 /* Evaluate and ignore VAL in case it has side-effects. */
4024 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4025 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4028 /* Stabilize the arguments in case we fail. */
4029 dest = builtin_save_expr (dest);
4030 val = builtin_save_expr (val);
4031 len = builtin_save_expr (len);
4033 len_rtx = expand_normal (len);
4034 determine_block_size (len, len_rtx, &min_size, &max_size,
4035 &probable_max_size);
4036 dest_mem = get_memory_rtx (dest, len);
4037 val_mode = TYPE_MODE (unsigned_char_type_node);
4039 if (TREE_CODE (val) != INTEGER_CST)
4041 rtx val_rtx;
4043 val_rtx = expand_normal (val);
4044 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4046 /* Assume that we can memset by pieces if we can store
4047 the coefficients by pieces (in the required modes).
4048 We can't pass builtin_memset_gen_str as that emits RTL. */
4049 c = 1;
4050 if (tree_fits_uhwi_p (len)
4051 && can_store_by_pieces (tree_to_uhwi (len),
4052 builtin_memset_read_str, &c, dest_align,
4053 true))
4055 val_rtx = force_reg (val_mode, val_rtx);
4056 store_by_pieces (dest_mem, tree_to_uhwi (len),
4057 builtin_memset_gen_str, val_rtx, dest_align,
4058 true, 0);
4060 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4061 dest_align, expected_align,
4062 expected_size, min_size, max_size,
4063 probable_max_size))
4064 goto do_libcall;
4066 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4067 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4068 return dest_mem;
4071 if (target_char_cast (val, &c))
4072 goto do_libcall;
4074 if (c)
4076 if (tree_fits_uhwi_p (len)
4077 && can_store_by_pieces (tree_to_uhwi (len),
4078 builtin_memset_read_str, &c, dest_align,
4079 true))
4080 store_by_pieces (dest_mem, tree_to_uhwi (len),
4081 builtin_memset_read_str, &c, dest_align, true, 0);
4082 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4083 gen_int_mode (c, val_mode),
4084 dest_align, expected_align,
4085 expected_size, min_size, max_size,
4086 probable_max_size))
4087 goto do_libcall;
4089 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4090 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4091 return dest_mem;
4094 set_mem_align (dest_mem, dest_align);
4095 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4096 CALL_EXPR_TAILCALL (orig_exp)
4097 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4098 expected_align, expected_size,
4099 min_size, max_size,
4100 probable_max_size);
4102 if (dest_addr == 0)
4104 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4105 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4108 return dest_addr;
4110 do_libcall:
4111 fndecl = get_callee_fndecl (orig_exp);
4112 fcode = DECL_FUNCTION_CODE (fndecl);
4113 if (fcode == BUILT_IN_MEMSET
4114 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4115 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4116 dest, val, len);
4117 else if (fcode == BUILT_IN_BZERO)
4118 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4119 dest, len);
4120 else
4121 gcc_unreachable ();
4122 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4123 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4124 return expand_call (fn, target, target == const0_rtx);
4127 /* Expand expression EXP, which is a call to the bzero builtin. Return
4128 NULL_RTX if we failed; the caller should emit a normal call. */
4130 static rtx
4131 expand_builtin_bzero (tree exp)
4133 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4134 return NULL_RTX;
4136 tree dest = CALL_EXPR_ARG (exp, 0);
4137 tree size = CALL_EXPR_ARG (exp, 1);
4139 check_memop_sizes (exp, dest, size);
4141 /* New argument list transforming bzero(ptr x, int y) to
4142 memset(ptr x, int 0, size_t y). This is done this way
4143 so that if it isn't expanded inline, we fall back to
4144 calling bzero instead of memset. */
4146 location_t loc = EXPR_LOCATION (exp);
4148 return expand_builtin_memset_args (dest, integer_zero_node,
4149 fold_convert_loc (loc,
4150 size_type_node, size),
4151 const0_rtx, VOIDmode, exp);
4154 /* Try to expand cmpstr operation ICODE with the given operands.
4155 Return the result rtx on success, otherwise return null. */
4157 static rtx
4158 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4159 HOST_WIDE_INT align)
4161 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4163 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4164 target = NULL_RTX;
4166 struct expand_operand ops[4];
4167 create_output_operand (&ops[0], target, insn_mode);
4168 create_fixed_operand (&ops[1], arg1_rtx);
4169 create_fixed_operand (&ops[2], arg2_rtx);
4170 create_integer_operand (&ops[3], align);
4171 if (maybe_expand_insn (icode, 4, ops))
4172 return ops[0].value;
4173 return NULL_RTX;
4176 /* Expand expression EXP, which is a call to the memcmp built-in function.
4177 Return NULL_RTX if we failed and the caller should emit a normal call,
4178 otherwise try to get the result in TARGET, if convenient.
4179 RESULT_EQ is true if we can relax the returned value to be either zero
4180 or nonzero, without caring about the sign. */
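/* Illustrative sketch, not part of the original sources: RESULT_EQ is true
   when only the zero/nonzero outcome of the comparison is used, e.g.

     if (memcmp (a, b, n) == 0)
       ...

   in which case the expansion need not produce the ordered
   negative/zero/positive result that a full use of memcmp requires.  */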
4182 static rtx
4183 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4185 if (!validate_arglist (exp,
4186 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4187 return NULL_RTX;
4189 tree arg1 = CALL_EXPR_ARG (exp, 0);
4190 tree arg2 = CALL_EXPR_ARG (exp, 1);
4191 tree len = CALL_EXPR_ARG (exp, 2);
4192 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4193 location_t loc = EXPR_LOCATION (exp);
4195 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4196 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4198 /* If we don't have POINTER_TYPE, call the function. */
4199 if (arg1_align == 0 || arg2_align == 0)
4200 return NULL_RTX;
4202 rtx arg1_rtx = get_memory_rtx (arg1, len);
4203 rtx arg2_rtx = get_memory_rtx (arg2, len);
4204 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4206 /* Set MEM_SIZE as appropriate. */
4207 if (CONST_INT_P (len_rtx))
4209 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4210 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4213 by_pieces_constfn constfn = NULL;
4215 const char *src_str = c_getstr (arg2);
4216 if (result_eq && src_str == NULL)
4218 src_str = c_getstr (arg1);
4219 if (src_str != NULL)
4220 std::swap (arg1_rtx, arg2_rtx);
4223 /* If SRC is a string constant and block move would be done
4224 by pieces, we can avoid loading the string from memory
4225 and only store the computed constants. */
4226 if (src_str
4227 && CONST_INT_P (len_rtx)
4228 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4229 constfn = builtin_memcpy_read_str;
4231 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4232 TREE_TYPE (len), target,
4233 result_eq, constfn,
4234 CONST_CAST (char *, src_str));
4236 if (result)
4238 /* Return the value in the proper mode for this function. */
4239 if (GET_MODE (result) == mode)
4240 return result;
4242 if (target != 0)
4244 convert_move (target, result, 0);
4245 return target;
4248 return convert_to_mode (mode, result, 0);
4251 return NULL_RTX;
4254 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4255 if we failed; the caller should emit a normal call. Otherwise try to get
4256 the result in TARGET, if convenient. */
4258 static rtx
4259 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4262 return NULL_RTX;
4264 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4265 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4266 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4268 rtx arg1_rtx, arg2_rtx;
4269 tree fndecl, fn;
4270 tree arg1 = CALL_EXPR_ARG (exp, 0);
4271 tree arg2 = CALL_EXPR_ARG (exp, 1);
4272 rtx result = NULL_RTX;
4274 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4275 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4277 /* If we don't have POINTER_TYPE, call the function. */
4278 if (arg1_align == 0 || arg2_align == 0)
4279 return NULL_RTX;
4281 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4282 arg1 = builtin_save_expr (arg1);
4283 arg2 = builtin_save_expr (arg2);
4285 arg1_rtx = get_memory_rtx (arg1, NULL);
4286 arg2_rtx = get_memory_rtx (arg2, NULL);
4288 /* Try to call cmpstrsi. */
4289 if (cmpstr_icode != CODE_FOR_nothing)
4290 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4291 MIN (arg1_align, arg2_align));
4293 /* Try to determine at least one length and call cmpstrnsi. */
4294 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4296 tree len;
4297 rtx arg3_rtx;
4299 tree len1 = c_strlen (arg1, 1);
4300 tree len2 = c_strlen (arg2, 1);
4302 if (len1)
4303 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4304 if (len2)
4305 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4307 /* If we don't have a constant length for the first, use the length
4308 of the second, if we know it. We don't require a constant for
4309 this case; some cost analysis could be done if both are available
4310 but neither is constant. For now, assume they're equally cheap,
4311 unless one has side effects. If both strings have constant lengths,
4312 use the smaller. */
4314 if (!len1)
4315 len = len2;
4316 else if (!len2)
4317 len = len1;
4318 else if (TREE_SIDE_EFFECTS (len1))
4319 len = len2;
4320 else if (TREE_SIDE_EFFECTS (len2))
4321 len = len1;
4322 else if (TREE_CODE (len1) != INTEGER_CST)
4323 len = len2;
4324 else if (TREE_CODE (len2) != INTEGER_CST)
4325 len = len1;
4326 else if (tree_int_cst_lt (len1, len2))
4327 len = len1;
4328 else
4329 len = len2;
4331 /* If both arguments have side effects, we cannot optimize. */
4332 if (len && !TREE_SIDE_EFFECTS (len))
4334 arg3_rtx = expand_normal (len);
4335 result = expand_cmpstrn_or_cmpmem
4336 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4337 arg3_rtx, MIN (arg1_align, arg2_align));
4341 if (result)
4343 /* Return the value in the proper mode for this function. */
4344 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4345 if (GET_MODE (result) == mode)
4346 return result;
4347 if (target == 0)
4348 return convert_to_mode (mode, result, 0);
4349 convert_move (target, result, 0);
4350 return target;
4353 /* Expand the library call ourselves using a stabilized argument
4354 list to avoid evaluating the function's arguments twice. */
4355 fndecl = get_callee_fndecl (exp);
4356 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4357 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4358 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4359 return expand_call (fn, target, target == const0_rtx);
4361 return NULL_RTX;
4364 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4365 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
4366 the result in TARGET, if convenient. */
4368 static rtx
4369 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4370 ATTRIBUTE_UNUSED machine_mode mode)
4372 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4374 if (!validate_arglist (exp,
4375 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4376 return NULL_RTX;
4378 /* If c_strlen can determine an expression for one of the string
4379 lengths, and it doesn't have side effects, then emit cmpstrnsi
4380 using length MIN(strlen(string)+1, arg3). */
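/* Illustrative sketch, not part of the original sources: when one argument
   is a string literal its length bounds the number of bytes that can
   matter, e.g.

     strncmp (s, "abc", 10)

   can examine at most strlen ("abc") + 1 == 4 bytes, so the comparison is
   emitted with length MIN (4, 10) == 4.  */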
4381 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4382 if (cmpstrn_icode != CODE_FOR_nothing)
4384 tree len, len1, len2, len3;
4385 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4386 rtx result;
4387 tree fndecl, fn;
4388 tree arg1 = CALL_EXPR_ARG (exp, 0);
4389 tree arg2 = CALL_EXPR_ARG (exp, 1);
4390 tree arg3 = CALL_EXPR_ARG (exp, 2);
4392 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4393 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4395 len1 = c_strlen (arg1, 1);
4396 len2 = c_strlen (arg2, 1);
4398 if (len1)
4399 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4400 if (len2)
4401 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4403 len3 = fold_convert_loc (loc, sizetype, arg3);
4405 /* If we don't have a constant length for the first, use the length
4406 of the second, if we know it. If neither string is constant length,
4407 use the given length argument. We don't require a constant for
4408 this case; some cost analysis could be done if both are available
4409 but neither is constant. For now, assume they're equally cheap,
4410 unless one has side effects. If both strings have constant lengths,
4411 use the smaller. */
4413 if (!len1 && !len2)
4414 len = len3;
4415 else if (!len1)
4416 len = len2;
4417 else if (!len2)
4418 len = len1;
4419 else if (TREE_SIDE_EFFECTS (len1))
4420 len = len2;
4421 else if (TREE_SIDE_EFFECTS (len2))
4422 len = len1;
4423 else if (TREE_CODE (len1) != INTEGER_CST)
4424 len = len2;
4425 else if (TREE_CODE (len2) != INTEGER_CST)
4426 len = len1;
4427 else if (tree_int_cst_lt (len1, len2))
4428 len = len1;
4429 else
4430 len = len2;
4432 /* If we are not using the given length, we must incorporate it here.
4433 The actual new length parameter will be MIN(len,arg3) in this case. */
4434 if (len != len3)
4435 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4436 arg1_rtx = get_memory_rtx (arg1, len);
4437 arg2_rtx = get_memory_rtx (arg2, len);
4438 arg3_rtx = expand_normal (len);
4439 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4440 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4441 MIN (arg1_align, arg2_align));
4442 if (result)
4444 /* Return the value in the proper mode for this function. */
4445 mode = TYPE_MODE (TREE_TYPE (exp));
4446 if (GET_MODE (result) == mode)
4447 return result;
4448 if (target == 0)
4449 return convert_to_mode (mode, result, 0);
4450 convert_move (target, result, 0);
4451 return target;
4454 /* Expand the library call ourselves using a stabilized argument
4455 list to avoid evaluating the function's arguments twice. */
4456 fndecl = get_callee_fndecl (exp);
4457 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4458 arg1, arg2, len);
4459 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4460 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4461 return expand_call (fn, target, target == const0_rtx);
4463 return NULL_RTX;
4466 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4467 if that's convenient. */
4470 expand_builtin_saveregs (void)
4472 rtx val;
4473 rtx_insn *seq;
4475 /* Don't do __builtin_saveregs more than once in a function.
4476 Save the result of the first call and reuse it. */
4477 if (saveregs_value != 0)
4478 return saveregs_value;
4480 /* When this function is called, it means that registers must be
4481 saved on entry to this function. So we migrate the call to the
4482 first insn of this function. */
4484 start_sequence ();
4486 /* Do whatever the machine needs done in this case. */
4487 val = targetm.calls.expand_builtin_saveregs ();
4489 seq = get_insns ();
4490 end_sequence ();
4492 saveregs_value = val;
4494 /* Put the insns after the NOTE that starts the function. If this
4495 is inside a start_sequence, make the outer-level insn chain current, so
4496 the code is placed at the start of the function. */
4497 push_topmost_sequence ();
4498 emit_insn_after (seq, entry_of_function ());
4499 pop_topmost_sequence ();
4501 return val;
4504 /* Expand a call to __builtin_next_arg. */
4506 static rtx
4507 expand_builtin_next_arg (void)
4509 /* Checking arguments is already done in fold_builtin_next_arg
4510 that must be called before this function. */
4511 return expand_binop (ptr_mode, add_optab,
4512 crtl->args.internal_arg_pointer,
4513 crtl->args.arg_offset_rtx,
4514 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4517 /* Make it easier for the backends by protecting the valist argument
4518 from multiple evaluations. */
4520 static tree
4521 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4523 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4525 /* The current way of determining the type of valist is completely
4526 bogus. We should have the information on the va builtin instead. */
4527 if (!vatype)
4528 vatype = targetm.fn_abi_va_list (cfun->decl);
4530 if (TREE_CODE (vatype) == ARRAY_TYPE)
4532 if (TREE_SIDE_EFFECTS (valist))
4533 valist = save_expr (valist);
4535 /* For this case, the backends will be expecting a pointer to
4536 vatype, but it's possible we've actually been given an array
4537 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4538 So fix it. */
4539 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4541 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4542 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4545 else
4547 tree pt = build_pointer_type (vatype);
4549 if (! needs_lvalue)
4551 if (! TREE_SIDE_EFFECTS (valist))
4552 return valist;
4554 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4555 TREE_SIDE_EFFECTS (valist) = 1;
4558 if (TREE_SIDE_EFFECTS (valist))
4559 valist = save_expr (valist);
4560 valist = fold_build2_loc (loc, MEM_REF,
4561 vatype, valist, build_int_cst (pt, 0));
4564 return valist;
4567 /* The "standard" definition of va_list is void*. */
4569 tree
4570 std_build_builtin_va_list (void)
4572 return ptr_type_node;
4575 /* The "standard" abi va_list is va_list_type_node. */
4577 tree
4578 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4580 return va_list_type_node;
4583 /* The "standard" type of va_list is va_list_type_node. */
4585 tree
4586 std_canonical_va_list_type (tree type)
4588 tree wtype, htype;
4590 wtype = va_list_type_node;
4591 htype = type;
4593 if (TREE_CODE (wtype) == ARRAY_TYPE)
4595 /* If va_list is an array type, the argument may have decayed
4596 to a pointer type, e.g. by being passed to another function.
4597 In that case, unwrap both types so that we can compare the
4598 underlying records. */
4599 if (TREE_CODE (htype) == ARRAY_TYPE
4600 || POINTER_TYPE_P (htype))
4602 wtype = TREE_TYPE (wtype);
4603 htype = TREE_TYPE (htype);
4606 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4607 return va_list_type_node;
4609 return NULL_TREE;
4612 /* The "standard" implementation of va_start: just assign `nextarg' to
4613 the variable. */
4615 void
4616 std_expand_builtin_va_start (tree valist, rtx nextarg)
4618 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4619 convert_move (va_r, nextarg, 0);
4621 /* We do not have any valid bounds for the pointer, so
4622 just store zero bounds for it. */
4623 if (chkp_function_instrumented_p (current_function_decl))
4624 chkp_expand_bounds_reset_for_mem (valist,
4625 make_tree (TREE_TYPE (valist),
4626 nextarg));
4629 /* Expand EXP, a call to __builtin_va_start. */
4631 static rtx
4632 expand_builtin_va_start (tree exp)
4634 rtx nextarg;
4635 tree valist;
4636 location_t loc = EXPR_LOCATION (exp);
4638 if (call_expr_nargs (exp) < 2)
4640 error_at (loc, "too few arguments to function %<va_start%>");
4641 return const0_rtx;
4644 if (fold_builtin_next_arg (exp, true))
4645 return const0_rtx;
4647 nextarg = expand_builtin_next_arg ();
4648 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4650 if (targetm.expand_builtin_va_start)
4651 targetm.expand_builtin_va_start (valist, nextarg);
4652 else
4653 std_expand_builtin_va_start (valist, nextarg);
4655 return const0_rtx;
4658 /* Expand EXP, a call to __builtin_va_end. */
4660 static rtx
4661 expand_builtin_va_end (tree exp)
4663 tree valist = CALL_EXPR_ARG (exp, 0);
4665 /* Evaluate for side effects, if needed. I hate macros that don't
4666 do that. */
4667 if (TREE_SIDE_EFFECTS (valist))
4668 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4670 return const0_rtx;
4673 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4674 builtin rather than just as an assignment in stdarg.h because of the
4675 nastiness of array-type va_list types. */
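/* Illustrative sketch, not part of the original sources: on targets where
   va_list is an array type (e.g. a one-element array of a record, as on
   x86-64), "va_copy (d, s)" cannot be a plain assignment because the
   arguments decay to pointers; the underlying record must be copied with a
   block move, which is what the array-type branch below does.  */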
4677 static rtx
4678 expand_builtin_va_copy (tree exp)
4680 tree dst, src, t;
4681 location_t loc = EXPR_LOCATION (exp);
4683 dst = CALL_EXPR_ARG (exp, 0);
4684 src = CALL_EXPR_ARG (exp, 1);
4686 dst = stabilize_va_list_loc (loc, dst, 1);
4687 src = stabilize_va_list_loc (loc, src, 0);
4689 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4691 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4693 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4694 TREE_SIDE_EFFECTS (t) = 1;
4695 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4697 else
4699 rtx dstb, srcb, size;
4701 /* Evaluate to pointers. */
4702 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4703 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4704 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4705 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4707 dstb = convert_memory_address (Pmode, dstb);
4708 srcb = convert_memory_address (Pmode, srcb);
4710 /* "Dereference" to BLKmode memories. */
4711 dstb = gen_rtx_MEM (BLKmode, dstb);
4712 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4713 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4714 srcb = gen_rtx_MEM (BLKmode, srcb);
4715 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4716 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4718 /* Copy. */
4719 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4722 return const0_rtx;
4725 /* Expand a call to one of the builtin functions __builtin_frame_address or
4726 __builtin_return_address. */
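/* Illustrative sketch, not part of the original sources:

     void *fp = __builtin_frame_address (0);     current frame
     void *ra = __builtin_return_address (0);    address we will return to

   A nonzero count asks for an enclosing frame and is diagnosed below with
   -Wframe-address, since nothing guarantees that such a frame exists or
   can be reached safely.  */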
4728 static rtx
4729 expand_builtin_frame_address (tree fndecl, tree exp)
4731 /* The argument must be a nonnegative integer constant.
4732 It counts the number of frames to scan up the stack.
4733 The value is either the frame pointer value or the return
4734 address saved in that frame. */
4735 if (call_expr_nargs (exp) == 0)
4736 /* Warning about missing arg was already issued. */
4737 return const0_rtx;
4738 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4740 error ("invalid argument to %qD", fndecl);
4741 return const0_rtx;
4743 else
4745 /* Number of frames to scan up the stack. */
4746 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4748 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4750 /* Some ports cannot access arbitrary stack frames. */
4751 if (tem == NULL)
4753 warning (0, "unsupported argument to %qD", fndecl);
4754 return const0_rtx;
4757 if (count)
4759 /* Warn since no effort is made to ensure that any frame
4760 beyond the current one exists or can be safely reached. */
4761 warning (OPT_Wframe_address, "calling %qD with "
4762 "a nonzero argument is unsafe", fndecl);
4765 /* For __builtin_frame_address, return what we've got. */
4766 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4767 return tem;
4769 if (!REG_P (tem)
4770 && ! CONSTANT_P (tem))
4771 tem = copy_addr_to_reg (tem);
4772 return tem;
4776 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4777 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4778 is the same as for allocate_dynamic_stack_space. */
4780 static rtx
4781 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4783 rtx op0;
4784 rtx result;
4785 unsigned int align;
4786 tree fndecl = get_callee_fndecl (exp);
4787 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4788 == BUILT_IN_ALLOCA_WITH_ALIGN);
4790 bool valid_arglist
4791 = (alloca_with_align
4792 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4793 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4795 if (!valid_arglist)
4796 return NULL_RTX;
4798 if ((alloca_with_align && !warn_vla_limit)
4799 || (!alloca_with_align && !warn_alloca_limit))
4801 /* -Walloca-larger-than and -Wvla-larger-than settings override
4802 the more general -Walloc-size-larger-than so unless either of
4803 the former options is specified check the alloca arguments for
4804 overflow. */
4805 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4806 int idx[] = { 0, -1 };
4807 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4810 /* Compute the argument. */
4811 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4813 /* Compute the alignment. */
4814 align = (alloca_with_align
4815 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4816 : BIGGEST_ALIGNMENT);
4818 /* Allocate the desired space. */
4819 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4820 result = convert_memory_address (ptr_mode, result);
4822 return result;
4825 /* Expand a call to bswap builtin in EXP.
4826 Return NULL_RTX if a normal call should be emitted rather than expanding the
4827 function in-line. If convenient, the result should be placed in TARGET.
4828 SUBTARGET may be used as the target for computing one of EXP's operands. */
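/* Worked example (illustrative only): __builtin_bswap32 (0x12345678)
   evaluates to 0x78563412, i.e. the bytes of the operand in reverse
   order.  */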
4830 static rtx
4831 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4832 rtx subtarget)
4834 tree arg;
4835 rtx op0;
4837 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4838 return NULL_RTX;
4840 arg = CALL_EXPR_ARG (exp, 0);
4841 op0 = expand_expr (arg,
4842 subtarget && GET_MODE (subtarget) == target_mode
4843 ? subtarget : NULL_RTX,
4844 target_mode, EXPAND_NORMAL);
4845 if (GET_MODE (op0) != target_mode)
4846 op0 = convert_to_mode (target_mode, op0, 1);
4848 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4850 gcc_assert (target);
4852 return convert_to_mode (target_mode, target, 1);
4855 /* Expand a call to a unary builtin in EXP.
4856 Return NULL_RTX if a normal call should be emitted rather than expanding the
4857 function in-line. If convenient, the result should be placed in TARGET.
4858 SUBTARGET may be used as the target for computing one of EXP's operands. */
4860 static rtx
4861 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4862 rtx subtarget, optab op_optab)
4864 rtx op0;
4866 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4867 return NULL_RTX;
4869 /* Compute the argument. */
4870 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4871 (subtarget
4872 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4873 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4874 VOIDmode, EXPAND_NORMAL);
4875 /* Compute op, into TARGET if possible.
4876 Set TARGET to wherever the result comes back. */
4877 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4878 op_optab, op0, target, op_optab != clrsb_optab);
4879 gcc_assert (target);
4881 return convert_to_mode (target_mode, target, 0);
4884 /* Expand a call to __builtin_expect. We just return our argument
4885 as the builtin_expect semantic should already have been executed by
4886 the tree branch prediction pass. */
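/* Illustrative sketch, not part of the original sources (handle_error is a
   placeholder):

     if (__builtin_expect (err != 0, 0))
       handle_error ();

   marks the branch as unlikely. The hint was consumed by the tree-level
   branch prediction pass, so only the value of the first argument is still
   needed here.  */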
4888 static rtx
4889 expand_builtin_expect (tree exp, rtx target)
4891 tree arg;
4893 if (call_expr_nargs (exp) < 2)
4894 return const0_rtx;
4895 arg = CALL_EXPR_ARG (exp, 0);
4897 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4898 /* When guessing was done, the hints should be already stripped away. */
4899 gcc_assert (!flag_guess_branch_prob
4900 || optimize == 0 || seen_error ());
4901 return target;
4904 /* Expand a call to __builtin_assume_aligned. We just return our first
4905 argument as the builtin_assume_aligned semantic should already have been
4906 executed by CCP. */
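/* Illustrative sketch, not part of the original sources (buf is a
   placeholder):

     double *p = (double *) __builtin_assume_aligned (buf, 32);

   tells the optimizers that BUF is 32-byte aligned. The alignment was
   already exploited during CCP, so the expansion simply returns the first
   argument.  */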
4908 static rtx
4909 expand_builtin_assume_aligned (tree exp, rtx target)
4911 if (call_expr_nargs (exp) < 2)
4912 return const0_rtx;
4913 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4914 EXPAND_NORMAL);
4915 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4916 && (call_expr_nargs (exp) < 3
4917 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4918 return target;
4921 void
4922 expand_builtin_trap (void)
4924 if (targetm.have_trap ())
4926 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4927 /* For trap insns when not accumulating outgoing args force
4928 REG_ARGS_SIZE note to prevent crossjumping of calls with
4929 different args sizes. */
4930 if (!ACCUMULATE_OUTGOING_ARGS)
4931 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4933 else
4935 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4936 tree call_expr = build_call_expr (fn, 0);
4937 expand_call (call_expr, NULL_RTX, false);
4940 emit_barrier ();
4943 /* Expand a call to __builtin_unreachable. We do nothing except emit
4944 a barrier saying that control flow will not pass here.
4946 It is the responsibility of the program being compiled to ensure
4947 that control flow never reaches __builtin_unreachable. */
4948 static void
4949 expand_builtin_unreachable (void)
4951 emit_barrier ();
4954 /* Expand EXP, a call to fabs, fabsf or fabsl.
4955 Return NULL_RTX if a normal call should be emitted rather than expanding
4956 the function inline. If convenient, the result should be placed
4957 in TARGET. SUBTARGET may be used as the target for computing
4958 the operand. */
4960 static rtx
4961 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4963 machine_mode mode;
4964 tree arg;
4965 rtx op0;
4967 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4968 return NULL_RTX;
4970 arg = CALL_EXPR_ARG (exp, 0);
4971 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4972 mode = TYPE_MODE (TREE_TYPE (arg));
4973 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4974 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4977 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4978 Return NULL if a normal call should be emitted rather than expanding the
4979 function inline. If convenient, the result should be placed in TARGET.
4980 SUBTARGET may be used as the target for computing the operand. */
4982 static rtx
4983 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4985 rtx op0, op1;
4986 tree arg;
4988 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4989 return NULL_RTX;
4991 arg = CALL_EXPR_ARG (exp, 0);
4992 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4994 arg = CALL_EXPR_ARG (exp, 1);
4995 op1 = expand_normal (arg);
4997 return expand_copysign (op0, op1, target);
5000 /* Expand a call to __builtin___clear_cache. */
5002 static rtx
5003 expand_builtin___clear_cache (tree exp)
5005 if (!targetm.code_for_clear_cache)
5007 #ifdef CLEAR_INSN_CACHE
5008 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5009 does something. Just do the default expansion to a call to
5010 __clear_cache(). */
5011 return NULL_RTX;
5012 #else
5013 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5014 does nothing. There is no need to call it. Do nothing. */
5015 return const0_rtx;
5016 #endif /* CLEAR_INSN_CACHE */
5019 /* We have a "clear_cache" insn, and it will handle everything. */
5020 tree begin, end;
5021 rtx begin_rtx, end_rtx;
5023 /* We must not expand to a library call. If we did, any
5024 fallback library function in libgcc that might contain a call to
5025 __builtin___clear_cache() would recurse infinitely. */
5026 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5028 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5029 return const0_rtx;
5032 if (targetm.have_clear_cache ())
5034 struct expand_operand ops[2];
5036 begin = CALL_EXPR_ARG (exp, 0);
5037 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5039 end = CALL_EXPR_ARG (exp, 1);
5040 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5042 create_address_operand (&ops[0], begin_rtx);
5043 create_address_operand (&ops[1], end_rtx);
5044 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5045 return const0_rtx;
5047 return const0_rtx;
5050 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5052 static rtx
5053 round_trampoline_addr (rtx tramp)
5055 rtx temp, addend, mask;
5057 /* If we don't need too much alignment, we'll have been guaranteed
5058 proper alignment by get_trampoline_type. */
5059 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5060 return tramp;
5062 /* Round address up to desired boundary. */
5063 temp = gen_reg_rtx (Pmode);
5064 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5065 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5067 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5068 temp, 0, OPTAB_LIB_WIDEN);
5069 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5070 temp, 0, OPTAB_LIB_WIDEN);
5072 return tramp;
5075 static rtx
5076 expand_builtin_init_trampoline (tree exp, bool onstack)
5078 tree t_tramp, t_func, t_chain;
5079 rtx m_tramp, r_tramp, r_chain, tmp;
5081 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5082 POINTER_TYPE, VOID_TYPE))
5083 return NULL_RTX;
5085 t_tramp = CALL_EXPR_ARG (exp, 0);
5086 t_func = CALL_EXPR_ARG (exp, 1);
5087 t_chain = CALL_EXPR_ARG (exp, 2);
5089 r_tramp = expand_normal (t_tramp);
5090 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5091 MEM_NOTRAP_P (m_tramp) = 1;
5093 /* If ONSTACK, the TRAMP argument should be the address of a field
5094 within the local function's FRAME decl. Either way, let's see if
5095 we can fill in the MEM_ATTRs for this memory. */
5096 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5097 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5099 /* Creator of a heap trampoline is responsible for making sure the
5100 address is aligned to at least STACK_BOUNDARY. Normally malloc
5101 will ensure this anyhow. */
5102 tmp = round_trampoline_addr (r_tramp);
5103 if (tmp != r_tramp)
5105 m_tramp = change_address (m_tramp, BLKmode, tmp);
5106 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5107 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5110 /* The FUNC argument should be the address of the nested function.
5111 Extract the actual function decl to pass to the hook. */
5112 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5113 t_func = TREE_OPERAND (t_func, 0);
5114 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5116 r_chain = expand_normal (t_chain);
5118 /* Generate insns to initialize the trampoline. */
5119 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5121 if (onstack)
5123 trampolines_created = 1;
5125 if (targetm.calls.custom_function_descriptors != 0)
5126 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5127 "trampoline generated for nested function %qD", t_func);
5130 return const0_rtx;
5133 static rtx
5134 expand_builtin_adjust_trampoline (tree exp)
5136 rtx tramp;
5138 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5139 return NULL_RTX;
5141 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5142 tramp = round_trampoline_addr (tramp);
5143 if (targetm.calls.trampoline_adjust_address)
5144 tramp = targetm.calls.trampoline_adjust_address (tramp);
5146 return tramp;
5149 /* Expand a call to the builtin descriptor initialization routine.
5150 A descriptor is made up of a couple of pointers to the static
5151 chain and the code entry in this order. */
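/* Illustrative sketch, not part of the original sources: the stores below
   lay the descriptor out as two consecutive pointer-sized words,

     word 0: static chain value
     word 1: code entry address

   matching the order stated above.  */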
5153 static rtx
5154 expand_builtin_init_descriptor (tree exp)
5156 tree t_descr, t_func, t_chain;
5157 rtx m_descr, r_descr, r_func, r_chain;
5159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5160 VOID_TYPE))
5161 return NULL_RTX;
5163 t_descr = CALL_EXPR_ARG (exp, 0);
5164 t_func = CALL_EXPR_ARG (exp, 1);
5165 t_chain = CALL_EXPR_ARG (exp, 2);
5167 r_descr = expand_normal (t_descr);
5168 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5169 MEM_NOTRAP_P (m_descr) = 1;
5171 r_func = expand_normal (t_func);
5172 r_chain = expand_normal (t_chain);
5174 /* Generate insns to initialize the descriptor. */
5175 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5176 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5177 POINTER_SIZE / BITS_PER_UNIT), r_func);
5179 return const0_rtx;
5182 /* Expand a call to the builtin descriptor adjustment routine. */
5184 static rtx
5185 expand_builtin_adjust_descriptor (tree exp)
5187 rtx tramp;
5189 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5190 return NULL_RTX;
5192 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5194 /* Unalign the descriptor to allow runtime identification. */
5195 tramp = plus_constant (ptr_mode, tramp,
5196 targetm.calls.custom_function_descriptors);
5198 return force_operand (tramp, NULL_RTX);
5201 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5202 function. The function first checks whether the back end provides
5203 an insn to implement signbit for the respective mode. If not, it
5204 checks whether the floating point format of the value is such that
5205 the sign bit can be extracted. If that is not the case, error out.
5206 EXP is the expression that is a call to the builtin function; if
5207 convenient, the result should be placed in TARGET. */
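/* Illustrative sketch, not part of the original sources: for IEEE double
   the sign is the most significant of the 64 bits, and

     signbit (-0.0) != 0     while     signbit (0.0) == 0

   which is why the bit-extraction paths below cannot be replaced by a
   plain "x < 0.0" comparison when the format has signed zeros.  */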
5208 static rtx
5209 expand_builtin_signbit (tree exp, rtx target)
5211 const struct real_format *fmt;
5212 machine_mode fmode, imode, rmode;
5213 tree arg;
5214 int word, bitpos;
5215 enum insn_code icode;
5216 rtx temp;
5217 location_t loc = EXPR_LOCATION (exp);
5219 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5220 return NULL_RTX;
5222 arg = CALL_EXPR_ARG (exp, 0);
5223 fmode = TYPE_MODE (TREE_TYPE (arg));
5224 rmode = TYPE_MODE (TREE_TYPE (exp));
5225 fmt = REAL_MODE_FORMAT (fmode);
5227 arg = builtin_save_expr (arg);
5229 /* Expand the argument yielding a RTX expression. */
5230 temp = expand_normal (arg);
5232 /* Check if the back end provides an insn that handles signbit for the
5233 argument's mode. */
5234 icode = optab_handler (signbit_optab, fmode);
5235 if (icode != CODE_FOR_nothing)
5237 rtx_insn *last = get_last_insn ();
5238 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5239 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5240 return target;
5241 delete_insns_since (last);
5244 /* For floating point formats without a sign bit, implement signbit
5245 as "ARG < 0.0". */
5246 bitpos = fmt->signbit_ro;
5247 if (bitpos < 0)
5249 /* But we can't do this if the format supports signed zero. */
5250 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5252 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5253 build_real (TREE_TYPE (arg), dconst0));
5254 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5257 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5259 imode = int_mode_for_mode (fmode);
5260 gcc_assert (imode != BLKmode);
5261 temp = gen_lowpart (imode, temp);
5263 else
5265 imode = word_mode;
5266 /* Handle targets with different FP word orders. */
5267 if (FLOAT_WORDS_BIG_ENDIAN)
5268 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5269 else
5270 word = bitpos / BITS_PER_WORD;
5271 temp = operand_subword_force (temp, word, fmode);
5272 bitpos = bitpos % BITS_PER_WORD;
5275 /* Force the intermediate word_mode (or narrower) result into a
5276 register. This avoids attempting to create paradoxical SUBREGs
5277 of floating point modes below. */
5278 temp = force_reg (imode, temp);
5280 /* If the bitpos is within the "result mode" lowpart, the operation
5281 can be implemented with a single bitwise AND. Otherwise, we need
5282 a right shift and an AND. */
5284 if (bitpos < GET_MODE_BITSIZE (rmode))
5286 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5288 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5289 temp = gen_lowpart (rmode, temp);
5290 temp = expand_binop (rmode, and_optab, temp,
5291 immed_wide_int_const (mask, rmode),
5292 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5294 else
5296 /* Perform a logical right shift to place the signbit in the least
5297 significant bit, then truncate the result to the desired mode
5298 and mask just this bit. */
5299 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5300 temp = gen_lowpart (rmode, temp);
5301 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5302 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5305 return temp;
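/* Illustrative sketch (not part of the original code): for IEEE single
   precision with a 32-bit integer result, the sign occupies bit 31, so the
   lowpart path above reduces to roughly

     result = float_bits & 0x80000000;

   whereas for a 64-bit double examined as a 64-bit integer but returned in a
   32-bit result mode, bitpos (63) exceeds the result width and the shift
   path applies:

     result = (double_bits >> 63) & 1;

   The exact constants depend on the target's floating point format.  */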
5308 /* Expand fork or exec calls. TARGET is the desired target of the
5309 call. EXP is the call. FN is the
5310 identifier of the actual function. IGNORE is nonzero if the
5311 value is to be ignored. */
5313 static rtx
5314 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5316 tree id, decl;
5317 tree call;
5319 /* If we are not profiling, just call the function. */
5320 if (!profile_arc_flag)
5321 return NULL_RTX;
5323 /* Otherwise call the wrapper. This should be equivalent for the rest of
5324 the compiler, so the code does not diverge, and the wrapper may run the
5325 code necessary for keeping the profiling sane. */
5327 switch (DECL_FUNCTION_CODE (fn))
5329 case BUILT_IN_FORK:
5330 id = get_identifier ("__gcov_fork");
5331 break;
5333 case BUILT_IN_EXECL:
5334 id = get_identifier ("__gcov_execl");
5335 break;
5337 case BUILT_IN_EXECV:
5338 id = get_identifier ("__gcov_execv");
5339 break;
5341 case BUILT_IN_EXECLP:
5342 id = get_identifier ("__gcov_execlp");
5343 break;
5345 case BUILT_IN_EXECLE:
5346 id = get_identifier ("__gcov_execle");
5347 break;
5349 case BUILT_IN_EXECVP:
5350 id = get_identifier ("__gcov_execvp");
5351 break;
5353 case BUILT_IN_EXECVE:
5354 id = get_identifier ("__gcov_execve");
5355 break;
5357 default:
5358 gcc_unreachable ();
5361 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5362 FUNCTION_DECL, id, TREE_TYPE (fn));
5363 DECL_EXTERNAL (decl) = 1;
5364 TREE_PUBLIC (decl) = 1;
5365 DECL_ARTIFICIAL (decl) = 1;
5366 TREE_NOTHROW (decl) = 1;
5367 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5368 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5369 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5370 return expand_call (call, target, ignore);
5375 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5376 the pointer in these functions is void*, the tree optimizers may remove
5377 casts. The mode computed in expand_builtin isn't reliable either, due
5378 to __sync_bool_compare_and_swap.
5380 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5381 group of builtins. This gives us log2 of the mode size. */
5383 static inline machine_mode
5384 get_builtin_sync_mode (int fcode_diff)
5386 /* The size is not negotiable, so ask not to get BLKmode in return
5387 if the target indicates that a smaller size would be better. */
5388 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
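/* Example (illustrative only): for __sync_fetch_and_add_4 the caller passes
   fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2, so the size requested is
   BITS_PER_UNIT << 2 == 32 bits (assuming 8-bit units), i.e. SImode on
   typical targets.  */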
5391 /* Expand the memory expression LOC and return the appropriate memory operand
5392 for the builtin_sync operations. */
5394 static rtx
5395 get_builtin_sync_mem (tree loc, machine_mode mode)
5397 rtx addr, mem;
5399 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5400 addr = convert_memory_address (Pmode, addr);
5402 /* Note that we explicitly do not want any alias information for this
5403 memory, so that we kill all other live memories. Otherwise we don't
5404 satisfy the full barrier semantics of the intrinsic. */
5405 mem = validize_mem (gen_rtx_MEM (mode, addr));
5407 /* The alignment needs to be at least that of the mode. */
5408 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5409 get_pointer_alignment (loc)));
5410 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5411 MEM_VOLATILE_P (mem) = 1;
5413 return mem;
5416 /* Make sure an argument is in the right mode.
5417 EXP is the tree argument.
5418 MODE is the mode it should be in. */
5420 static rtx
5421 expand_expr_force_mode (tree exp, machine_mode mode)
5423 rtx val;
5424 machine_mode old_mode;
5426 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5427 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5428 of CONST_INTs, where we know the old_mode only from the call argument. */
5430 old_mode = GET_MODE (val);
5431 if (old_mode == VOIDmode)
5432 old_mode = TYPE_MODE (TREE_TYPE (exp));
5433 val = convert_modes (mode, old_mode, val, 1);
5434 return val;
5438 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5439 EXP is the CALL_EXPR. CODE is the rtx code
5440 that corresponds to the arithmetic or logical operation from the name;
5441 an exception here is that NOT actually means NAND. TARGET is an optional
5442 place for us to store the results; AFTER is true if this is the
5443 xxx_and_fetch form. */
5445 static rtx
5446 expand_builtin_sync_operation (machine_mode mode, tree exp,
5447 enum rtx_code code, bool after,
5448 rtx target)
5450 rtx val, mem;
5451 location_t loc = EXPR_LOCATION (exp);
5453 if (code == NOT && warn_sync_nand)
5455 tree fndecl = get_callee_fndecl (exp);
5456 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5458 static bool warned_f_a_n, warned_n_a_f;
5460 switch (fcode)
5462 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5463 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5464 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5465 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5466 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5467 if (warned_f_a_n)
5468 break;
5470 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5471 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5472 warned_f_a_n = true;
5473 break;
5475 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5476 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5477 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5478 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5479 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5480 if (warned_n_a_f)
5481 break;
5483 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5484 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5485 warned_n_a_f = true;
5486 break;
5488 default:
5489 gcc_unreachable ();
5493 /* Expand the operands. */
5494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5495 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5497 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5498 after);
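/* For example (illustrative): __sync_fetch_and_add_4 reaches this point with
   CODE == PLUS and AFTER == false, so the pre-operation value is returned,
   while __sync_nand_and_fetch_4 arrives with CODE == NOT and AFTER == true
   and yields ~(*ptr & val), per the GCC 4.4 NAND semantics warned about
   above.  */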
5501 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5502 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5503 true if this is the boolean form. TARGET is a place for us to store the
5504 results; this is NOT optional if IS_BOOL is true. */
5506 static rtx
5507 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5508 bool is_bool, rtx target)
5510 rtx old_val, new_val, mem;
5511 rtx *pbool, *poval;
5513 /* Expand the operands. */
5514 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5515 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5516 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5518 pbool = poval = NULL;
5519 if (target != const0_rtx)
5521 if (is_bool)
5522 pbool = &target;
5523 else
5524 poval = &target;
5526 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5527 false, MEMMODEL_SYNC_SEQ_CST,
5528 MEMMODEL_SYNC_SEQ_CST))
5529 return NULL_RTX;
5531 return target;
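/* Illustrative mapping (not from the original sources): for
   __sync_bool_compare_and_swap_4 only PBOOL is set, so the success flag of
   the compare-and-swap is returned, whereas __sync_val_compare_and_swap_4
   sets POVAL and returns the previous contents of *MEM instead.  */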
5534 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5535 general form is actually an atomic exchange, and some targets only
5536 support a reduced form with the second argument being a constant 1.
5537 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5538 the results. */
5540 static rtx
5541 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5542 rtx target)
5544 rtx val, mem;
5546 /* Expand the operands. */
5547 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5548 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5550 return expand_sync_lock_test_and_set (target, mem, val);
5553 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5555 static void
5556 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5558 rtx mem;
5560 /* Expand the operands. */
5561 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5563 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5566 /* Given an integer representing an ``enum memmodel'', verify its
5567 correctness and return the memory model enum. */
5569 static enum memmodel
5570 get_memmodel (tree exp)
5572 rtx op;
5573 unsigned HOST_WIDE_INT val;
5574 source_location loc
5575 = expansion_point_location_if_in_system_header (input_location);
5577 /* If the parameter is not a constant, it's a run time value so we'll just
5578 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5579 if (TREE_CODE (exp) != INTEGER_CST)
5580 return MEMMODEL_SEQ_CST;
5582 op = expand_normal (exp);
5584 val = INTVAL (op);
5585 if (targetm.memmodel_check)
5586 val = targetm.memmodel_check (val);
5587 else if (val & ~MEMMODEL_MASK)
5589 warning_at (loc, OPT_Winvalid_memory_model,
5590 "unknown architecture specifier in memory model to builtin");
5591 return MEMMODEL_SEQ_CST;
5594 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5595 if (memmodel_base (val) >= MEMMODEL_LAST)
5597 warning_at (loc, OPT_Winvalid_memory_model,
5598 "invalid memory model argument to builtin");
5599 return MEMMODEL_SEQ_CST;
5602 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5603 be conservative and promote consume to acquire. */
5604 if (val == MEMMODEL_CONSUME)
5605 val = MEMMODEL_ACQUIRE;
5607 return (enum memmodel) val;
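/* Example (illustrative): a call such as
     __atomic_load_n (p, __ATOMIC_CONSUME)
   reaches this point with the INTEGER_CST 1 (__ATOMIC_CONSUME) and is
   promoted to MEMMODEL_ACQUIRE by the PR 59448 workaround above, while a
   non-constant model argument is simply treated as MEMMODEL_SEQ_CST.  */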
5610 /* Expand the __atomic_exchange intrinsic:
5611 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5612 EXP is the CALL_EXPR.
5613 TARGET is an optional place for us to store the results. */
5615 static rtx
5616 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5618 rtx val, mem;
5619 enum memmodel model;
5621 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5623 if (!flag_inline_atomics)
5624 return NULL_RTX;
5626 /* Expand the operands. */
5627 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5628 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5630 return expand_atomic_exchange (target, mem, val, model);
5633 /* Expand the __atomic_compare_exchange intrinsic:
5634 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5635 TYPE desired, BOOL weak,
5636 enum memmodel success,
5637 enum memmodel failure)
5638 EXP is the CALL_EXPR.
5639 TARGET is an optional place for us to store the results. */
5641 static rtx
5642 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5643 rtx target)
5645 rtx expect, desired, mem, oldval;
5646 rtx_code_label *label;
5647 enum memmodel success, failure;
5648 tree weak;
5649 bool is_weak;
5650 source_location loc
5651 = expansion_point_location_if_in_system_header (input_location);
5653 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5654 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5656 if (failure > success)
5658 warning_at (loc, OPT_Winvalid_memory_model,
5659 "failure memory model cannot be stronger than success "
5660 "memory model for %<__atomic_compare_exchange%>");
5661 success = MEMMODEL_SEQ_CST;
5664 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5666 warning_at (loc, OPT_Winvalid_memory_model,
5667 "invalid failure memory model for "
5668 "%<__atomic_compare_exchange%>");
5669 failure = MEMMODEL_SEQ_CST;
5670 success = MEMMODEL_SEQ_CST;
5674 if (!flag_inline_atomics)
5675 return NULL_RTX;
5677 /* Expand the operands. */
5678 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5680 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5681 expect = convert_memory_address (Pmode, expect);
5682 expect = gen_rtx_MEM (mode, expect);
5683 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5685 weak = CALL_EXPR_ARG (exp, 3);
5686 is_weak = false;
5687 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5688 is_weak = true;
5690 if (target == const0_rtx)
5691 target = NULL;
5693 /* Lest the rtl backend create a race condition with an improper store
5694 to memory, always create a new pseudo for OLDVAL. */
5695 oldval = NULL;
5697 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5698 is_weak, success, failure))
5699 return NULL_RTX;
5701 /* Conditionally store back to EXPECT, lest we create a race condition
5702 with an improper store to memory. */
5703 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5704 the normal case where EXPECT is totally private, i.e. a register. At
5705 which point the store can be unconditional. */
5706 label = gen_label_rtx ();
5707 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5708 GET_MODE (target), 1, label);
5709 emit_move_insn (expect, oldval);
5710 emit_label (label);
5712 return target;
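/* The insns emitted above behave roughly like this illustrative pseudo-code:

     success = CAS (mem, *expect, desired);
     if (!success)
       *expect = oldval;
     return success;

   i.e. EXPECT is only overwritten when the exchange fails, matching the
   documented __atomic_compare_exchange behavior.  */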
5715 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5716 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5717 call. The weak parameter must be dropped to match the expected parameter
5718 list and the expected argument changed from value to pointer to memory
5719 slot. */
5721 static void
5722 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5724 unsigned int z;
5725 vec<tree, va_gc> *vec;
5727 vec_alloc (vec, 5);
5728 vec->quick_push (gimple_call_arg (call, 0));
5729 tree expected = gimple_call_arg (call, 1);
5730 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5731 TREE_TYPE (expected));
5732 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5733 if (expd != x)
5734 emit_move_insn (x, expd);
5735 tree v = make_tree (TREE_TYPE (expected), x);
5736 vec->quick_push (build1 (ADDR_EXPR,
5737 build_pointer_type (TREE_TYPE (expected)), v));
5738 vec->quick_push (gimple_call_arg (call, 2));
5739 /* Skip the boolean weak parameter. */
5740 for (z = 4; z < 6; z++)
5741 vec->quick_push (gimple_call_arg (call, z));
5742 built_in_function fncode
5743 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5744 + exact_log2 (GET_MODE_SIZE (mode)));
5745 tree fndecl = builtin_decl_explicit (fncode);
5746 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5747 fndecl);
5748 tree exp = build_call_vec (boolean_type_node, fn, vec);
5749 tree lhs = gimple_call_lhs (call);
5750 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5751 if (lhs)
5753 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5754 if (GET_MODE (boolret) != mode)
5755 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5756 x = force_reg (mode, x);
5757 write_complex_part (target, boolret, true);
5758 write_complex_part (target, x, false);
5762 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
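/* Illustrative note: in the internal-function form, argument 3 packs the
   access size into the low byte and the weak flag into bit 8, as decoded
   below; e.g. a weak 4-byte compare-exchange passes 4 | 256 == 260.  */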
5764 void
5765 expand_ifn_atomic_compare_exchange (gcall *call)
5767 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5768 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5769 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5770 rtx expect, desired, mem, oldval, boolret;
5771 enum memmodel success, failure;
5772 tree lhs;
5773 bool is_weak;
5774 source_location loc
5775 = expansion_point_location_if_in_system_header (gimple_location (call));
5777 success = get_memmodel (gimple_call_arg (call, 4));
5778 failure = get_memmodel (gimple_call_arg (call, 5));
5780 if (failure > success)
5782 warning_at (loc, OPT_Winvalid_memory_model,
5783 "failure memory model cannot be stronger than success "
5784 "memory model for %<__atomic_compare_exchange%>");
5785 success = MEMMODEL_SEQ_CST;
5788 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5790 warning_at (loc, OPT_Winvalid_memory_model,
5791 "invalid failure memory model for "
5792 "%<__atomic_compare_exchange%>");
5793 failure = MEMMODEL_SEQ_CST;
5794 success = MEMMODEL_SEQ_CST;
5797 if (!flag_inline_atomics)
5799 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5800 return;
5803 /* Expand the operands. */
5804 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5806 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5807 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5809 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5811 boolret = NULL;
5812 oldval = NULL;
5814 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5815 is_weak, success, failure))
5817 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5818 return;
5821 lhs = gimple_call_lhs (call);
5822 if (lhs)
5824 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5825 if (GET_MODE (boolret) != mode)
5826 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5827 write_complex_part (target, boolret, true);
5828 write_complex_part (target, oldval, false);
5832 /* Expand the __atomic_load intrinsic:
5833 TYPE __atomic_load (TYPE *object, enum memmodel)
5834 EXP is the CALL_EXPR.
5835 TARGET is an optional place for us to store the results. */
5837 static rtx
5838 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5840 rtx mem;
5841 enum memmodel model;
5843 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5844 if (is_mm_release (model) || is_mm_acq_rel (model))
5846 source_location loc
5847 = expansion_point_location_if_in_system_header (input_location);
5848 warning_at (loc, OPT_Winvalid_memory_model,
5849 "invalid memory model for %<__atomic_load%>");
5850 model = MEMMODEL_SEQ_CST;
5853 if (!flag_inline_atomics)
5854 return NULL_RTX;
5856 /* Expand the operand. */
5857 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5859 return expand_atomic_load (target, mem, model);
5863 /* Expand the __atomic_store intrinsic:
5864 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5865 EXP is the CALL_EXPR.
5866 TARGET is an optional place for us to store the results. */
5868 static rtx
5869 expand_builtin_atomic_store (machine_mode mode, tree exp)
5871 rtx mem, val;
5872 enum memmodel model;
5874 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5875 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5876 || is_mm_release (model)))
5878 source_location loc
5879 = expansion_point_location_if_in_system_header (input_location);
5880 warning_at (loc, OPT_Winvalid_memory_model,
5881 "invalid memory model for %<__atomic_store%>");
5882 model = MEMMODEL_SEQ_CST;
5885 if (!flag_inline_atomics)
5886 return NULL_RTX;
5888 /* Expand the operands. */
5889 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5890 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5892 return expand_atomic_store (mem, val, model, false);
5895 /* Expand the __atomic_fetch_XXX intrinsic:
5896 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5897 EXP is the CALL_EXPR.
5898 TARGET is an optional place for us to store the results.
5899 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5900 FETCH_AFTER is true if returning the result of the operation.
5901 FETCH_AFTER is false if returning the value before the operation.
5902 IGNORE is true if the result is not used.
5903 EXT_CALL is the correct builtin for an external call if this cannot be
5904 resolved to an instruction sequence. */
5906 static rtx
5907 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5908 enum rtx_code code, bool fetch_after,
5909 bool ignore, enum built_in_function ext_call)
5911 rtx val, mem, ret;
5912 enum memmodel model;
5913 tree fndecl;
5914 tree addr;
5916 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5918 /* Expand the operands. */
5919 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5920 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5922 /* Only try generating instructions if inlining is turned on. */
5923 if (flag_inline_atomics)
5925 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5926 if (ret)
5927 return ret;
5930 /* If the library call does not need to be redirected to a different routine, just return. */
5931 if (ext_call == BUILT_IN_NONE)
5932 return NULL_RTX;
5934 /* Change the call to the specified function. */
5935 fndecl = get_callee_fndecl (exp);
5936 addr = CALL_EXPR_FN (exp);
5937 STRIP_NOPS (addr);
5939 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5940 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5942 /* Expand the call here so we can emit trailing code. */
5943 ret = expand_call (exp, target, ignore);
5945 /* Replace the original function just in case it matters. */
5946 TREE_OPERAND (addr, 0) = fndecl;
5948 /* Then issue the arithmetic correction to return the right result. */
5949 if (!ignore)
5951 if (code == NOT)
5953 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5954 OPTAB_LIB_WIDEN);
5955 ret = expand_simple_unop (mode, NOT, ret, target, true);
5957 else
5958 ret = expand_simple_binop (mode, code, ret, val, target, true,
5959 OPTAB_LIB_WIDEN);
5961 return ret;
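/* For instance (illustrative): if __atomic_add_fetch_4 cannot be expanded
   inline and EXT_CALL resolves to __atomic_fetch_add_4, the library call
   returns the pre-operation value, so the correction above re-applies the
   operation as ret = ret + val; for the NOT (NAND) case it instead computes
   ret = ~(ret & val).  */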
5964 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5966 void
5967 expand_ifn_atomic_bit_test_and (gcall *call)
5969 tree ptr = gimple_call_arg (call, 0);
5970 tree bit = gimple_call_arg (call, 1);
5971 tree flag = gimple_call_arg (call, 2);
5972 tree lhs = gimple_call_lhs (call);
5973 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5974 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5975 enum rtx_code code;
5976 optab optab;
5977 struct expand_operand ops[5];
5979 gcc_assert (flag_inline_atomics);
5981 if (gimple_call_num_args (call) == 4)
5982 model = get_memmodel (gimple_call_arg (call, 3));
5984 rtx mem = get_builtin_sync_mem (ptr, mode);
5985 rtx val = expand_expr_force_mode (bit, mode);
5987 switch (gimple_call_internal_fn (call))
5989 case IFN_ATOMIC_BIT_TEST_AND_SET:
5990 code = IOR;
5991 optab = atomic_bit_test_and_set_optab;
5992 break;
5993 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5994 code = XOR;
5995 optab = atomic_bit_test_and_complement_optab;
5996 break;
5997 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5998 code = AND;
5999 optab = atomic_bit_test_and_reset_optab;
6000 break;
6001 default:
6002 gcc_unreachable ();
6005 if (lhs == NULL_TREE)
6007 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6008 val, NULL_RTX, true, OPTAB_DIRECT);
6009 if (code == AND)
6010 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6011 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6012 return;
6015 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6016 enum insn_code icode = direct_optab_handler (optab, mode);
6017 gcc_assert (icode != CODE_FOR_nothing);
6018 create_output_operand (&ops[0], target, mode);
6019 create_fixed_operand (&ops[1], mem);
6020 create_convert_operand_to (&ops[2], val, mode, true);
6021 create_integer_operand (&ops[3], model);
6022 create_integer_operand (&ops[4], integer_onep (flag));
6023 if (maybe_expand_insn (icode, 5, ops))
6024 return;
6026 rtx bitval = val;
6027 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6028 val, NULL_RTX, true, OPTAB_DIRECT);
6029 rtx maskval = val;
6030 if (code == AND)
6031 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6032 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6033 code, model, false);
6034 if (integer_onep (flag))
6036 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6037 NULL_RTX, true, OPTAB_DIRECT);
6038 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6039 true, OPTAB_DIRECT);
6041 else
6042 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6043 OPTAB_DIRECT);
6044 if (result != target)
6045 emit_move_insn (target, result);
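/* Fallback sketch for the path above when no direct optab pattern matched
   (illustrative): for IFN_ATOMIC_BIT_TEST_AND_SET with FLAG nonzero the
   result is effectively

     old = __atomic_fetch_or (ptr, 1 << bit, model);
     lhs = (old >> bit) & 1;

   while with FLAG zero the old value is masked with (1 << bit) rather than
   shifted down to bit 0.  */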
6048 /* Expand an atomic clear operation.
6049 void _atomic_clear (BOOL *obj, enum memmodel)
6050 EXP is the call expression. */
6052 static rtx
6053 expand_builtin_atomic_clear (tree exp)
6055 machine_mode mode;
6056 rtx mem, ret;
6057 enum memmodel model;
6059 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6060 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6061 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6063 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6065 source_location loc
6066 = expansion_point_location_if_in_system_header (input_location);
6067 warning_at (loc, OPT_Winvalid_memory_model,
6068 "invalid memory model for %<__atomic_store%>");
6069 model = MEMMODEL_SEQ_CST;
6072 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6073 Failing that, a store is issued by __atomic_store. The only way this can
6074 fail is if the bool type is larger than a word size. Unlikely, but
6075 handle it anyway for completeness. Assume a single threaded model since
6076 there is no atomic support in this case, and no barriers are required. */
6077 ret = expand_atomic_store (mem, const0_rtx, model, true);
6078 if (!ret)
6079 emit_move_insn (mem, const0_rtx);
6080 return const0_rtx;
6083 /* Expand an atomic test_and_set operation.
6084 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6085 EXP is the call expression. */
6087 static rtx
6088 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6090 rtx mem;
6091 enum memmodel model;
6092 machine_mode mode;
6094 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6095 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6096 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6098 return expand_atomic_test_and_set (target, mem, model);
6102 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6103 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6105 static tree
6106 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6108 int size;
6109 machine_mode mode;
6110 unsigned int mode_align, type_align;
6112 if (TREE_CODE (arg0) != INTEGER_CST)
6113 return NULL_TREE;
6115 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6116 mode = mode_for_size (size, MODE_INT, 0);
6117 mode_align = GET_MODE_ALIGNMENT (mode);
6119 if (TREE_CODE (arg1) == INTEGER_CST)
6121 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6123 /* Either this argument is null, or it's a fake pointer encoding
6124 the alignment of the object. */
6125 val = least_bit_hwi (val);
6126 val *= BITS_PER_UNIT;
6128 if (val == 0 || mode_align < val)
6129 type_align = mode_align;
6130 else
6131 type_align = val;
6133 else
6135 tree ttype = TREE_TYPE (arg1);
6137 /* This function is usually invoked and folded immediately by the front
6138 end before anything else has a chance to look at it. The pointer
6139 parameter at this point is usually cast to a void *, so check for that
6140 and look past the cast. */
6141 if (CONVERT_EXPR_P (arg1)
6142 && POINTER_TYPE_P (ttype)
6143 && VOID_TYPE_P (TREE_TYPE (ttype))
6144 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6145 arg1 = TREE_OPERAND (arg1, 0);
6147 ttype = TREE_TYPE (arg1);
6148 gcc_assert (POINTER_TYPE_P (ttype));
6150 /* Get the underlying type of the object. */
6151 ttype = TREE_TYPE (ttype);
6152 type_align = TYPE_ALIGN (ttype);
6155 /* If the object has smaller alignment, the lock free routines cannot
6156 be used. */
6157 if (type_align < mode_align)
6158 return boolean_false_node;
6160 /* Check if a compare_and_swap pattern exists for the mode which represents
6161 the required size. The pattern is not allowed to fail, so the existence
6162 of the pattern indicates support is present. Also require that an
6163 atomic load exists for the required size. */
6164 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6165 return boolean_true_node;
6166 else
6167 return boolean_false_node;
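/* Examples (illustrative): __atomic_always_lock_free (4, 0) folds to true
   here when the target provides both a 4-byte compare-and-swap pattern and a
   4-byte atomic load; passing a constant second argument of 2 instead
   declares only 2-byte alignment, so the same query folds to false on
   targets whose 4-byte integer mode requires natural alignment.  */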
6170 /* Return true if the parameters to call EXP represent an object which will
6171 always generate lock free instructions. The first argument represents the
6172 size of the object, and the second parameter is a pointer to the object
6173 itself. If NULL is passed for the object, then the result is based on
6174 typical alignment for an object of the specified size. Otherwise return
6175 false. */
6177 static rtx
6178 expand_builtin_atomic_always_lock_free (tree exp)
6180 tree size;
6181 tree arg0 = CALL_EXPR_ARG (exp, 0);
6182 tree arg1 = CALL_EXPR_ARG (exp, 1);
6184 if (TREE_CODE (arg0) != INTEGER_CST)
6186 error ("non-constant argument 1 to __atomic_always_lock_free");
6187 return const0_rtx;
6190 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6191 if (size == boolean_true_node)
6192 return const1_rtx;
6193 return const0_rtx;
6196 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6197 is lock free on this architecture. */
6199 static tree
6200 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6202 if (!flag_inline_atomics)
6203 return NULL_TREE;
6205 /* If it isn't always lock free, don't generate a result. */
6206 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6207 return boolean_true_node;
6209 return NULL_TREE;
6212 /* Return true if the parameters to call EXP represent an object which will
6213 always generate lock free instructions. The first argument represents the
6214 size of the object, and the second parameter is a pointer to the object
6215 itself. If NULL is passed for the object, then the result is based on
6216 typical alignment for an object of the specified size. Otherwise return
6217 NULL. */
6219 static rtx
6220 expand_builtin_atomic_is_lock_free (tree exp)
6222 tree size;
6223 tree arg0 = CALL_EXPR_ARG (exp, 0);
6224 tree arg1 = CALL_EXPR_ARG (exp, 1);
6226 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6228 error ("non-integer argument 1 to __atomic_is_lock_free");
6229 return NULL_RTX;
6232 if (!flag_inline_atomics)
6233 return NULL_RTX;
6235 /* If the value is known at compile time, return the RTX for it. */
6236 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6237 if (size == boolean_true_node)
6238 return const1_rtx;
6240 return NULL_RTX;
6243 /* Expand the __atomic_thread_fence intrinsic:
6244 void __atomic_thread_fence (enum memmodel)
6245 EXP is the CALL_EXPR. */
6247 static void
6248 expand_builtin_atomic_thread_fence (tree exp)
6250 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6251 expand_mem_thread_fence (model);
6254 /* Expand the __atomic_signal_fence intrinsic:
6255 void __atomic_signal_fence (enum memmodel)
6256 EXP is the CALL_EXPR. */
6258 static void
6259 expand_builtin_atomic_signal_fence (tree exp)
6261 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6262 expand_mem_signal_fence (model);
6265 /* Expand the __sync_synchronize intrinsic. */
6267 static void
6268 expand_builtin_sync_synchronize (void)
6270 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6273 static rtx
6274 expand_builtin_thread_pointer (tree exp, rtx target)
6276 enum insn_code icode;
6277 if (!validate_arglist (exp, VOID_TYPE))
6278 return const0_rtx;
6279 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6280 if (icode != CODE_FOR_nothing)
6282 struct expand_operand op;
6283 /* If the target is not suitable then create a new target. */
6284 if (target == NULL_RTX
6285 || !REG_P (target)
6286 || GET_MODE (target) != Pmode)
6287 target = gen_reg_rtx (Pmode);
6288 create_output_operand (&op, target, Pmode);
6289 expand_insn (icode, 1, &op);
6290 return target;
6292 error ("__builtin_thread_pointer is not supported on this target");
6293 return const0_rtx;
6296 static void
6297 expand_builtin_set_thread_pointer (tree exp)
6299 enum insn_code icode;
6300 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6301 return;
6302 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6303 if (icode != CODE_FOR_nothing)
6305 struct expand_operand op;
6306 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6307 Pmode, EXPAND_NORMAL);
6308 create_input_operand (&op, val, Pmode);
6309 expand_insn (icode, 1, &op);
6310 return;
6312 error ("__builtin_set_thread_pointer is not supported on this target");
6316 /* Emit code to restore the current value of the stack. */
6318 static void
6319 expand_stack_restore (tree var)
6321 rtx_insn *prev;
6322 rtx sa = expand_normal (var);
6324 sa = convert_memory_address (Pmode, sa);
6326 prev = get_last_insn ();
6327 emit_stack_restore (SAVE_BLOCK, sa);
6329 record_new_stack_level ();
6331 fixup_args_size_notes (prev, get_last_insn (), 0);
6334 /* Emit code to save the current value of the stack. */
6336 static rtx
6337 expand_stack_save (void)
6339 rtx ret = NULL_RTX;
6341 emit_stack_save (SAVE_BLOCK, &ret);
6342 return ret;
6346 /* Expand an expression EXP that calls a built-in function,
6347 with result going to TARGET if that's convenient
6348 (and in mode MODE if that's convenient).
6349 SUBTARGET may be used as the target for computing one of EXP's operands.
6350 IGNORE is nonzero if the value is to be ignored. */
6353 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6354 int ignore)
6356 tree fndecl = get_callee_fndecl (exp);
6357 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6358 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6359 int flags;
6361 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6362 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6364 /* When ASan is enabled, we don't want to expand some memory/string
6365 builtins and rely on libsanitizer's hooks. This allows us to avoid
6366 redundant checks and be sure that possible overflow will be detected
6367 by ASan. */
6369 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6370 return expand_call (exp, target, ignore);
6372 /* When not optimizing, generate calls to library functions for a certain
6373 set of builtins. */
6374 if (!optimize
6375 && !called_as_built_in (fndecl)
6376 && fcode != BUILT_IN_FORK
6377 && fcode != BUILT_IN_EXECL
6378 && fcode != BUILT_IN_EXECV
6379 && fcode != BUILT_IN_EXECLP
6380 && fcode != BUILT_IN_EXECLE
6381 && fcode != BUILT_IN_EXECVP
6382 && fcode != BUILT_IN_EXECVE
6383 && fcode != BUILT_IN_ALLOCA
6384 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6385 && fcode != BUILT_IN_FREE
6386 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6387 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6388 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6389 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6390 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6391 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6392 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6393 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6394 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6395 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6396 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6397 && fcode != BUILT_IN_CHKP_BNDRET)
6398 return expand_call (exp, target, ignore);
6400 /* The built-in function expanders test for target == const0_rtx
6401 to determine whether the function's result will be ignored. */
6402 if (ignore)
6403 target = const0_rtx;
6405 /* If the result of a pure or const built-in function is ignored, and
6406 none of its arguments are volatile, we can avoid expanding the
6407 built-in call and just evaluate the arguments for side-effects. */
6408 if (target == const0_rtx
6409 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6410 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6412 bool volatilep = false;
6413 tree arg;
6414 call_expr_arg_iterator iter;
6416 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6417 if (TREE_THIS_VOLATILE (arg))
6419 volatilep = true;
6420 break;
6423 if (! volatilep)
6425 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6426 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6427 return const0_rtx;
6431 /* expand_builtin_with_bounds is supposed to be used for
6432 instrumented builtin calls. */
6433 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6435 switch (fcode)
6437 CASE_FLT_FN (BUILT_IN_FABS):
6438 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6439 case BUILT_IN_FABSD32:
6440 case BUILT_IN_FABSD64:
6441 case BUILT_IN_FABSD128:
6442 target = expand_builtin_fabs (exp, target, subtarget);
6443 if (target)
6444 return target;
6445 break;
6447 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6449 target = expand_builtin_copysign (exp, target, subtarget);
6450 if (target)
6451 return target;
6452 break;
6454 /* Just do a normal library call if we were unable to fold
6455 the values. */
6456 CASE_FLT_FN (BUILT_IN_CABS):
6457 break;
6459 CASE_FLT_FN (BUILT_IN_FMA):
6460 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6461 if (target)
6462 return target;
6463 break;
6465 CASE_FLT_FN (BUILT_IN_ILOGB):
6466 if (! flag_unsafe_math_optimizations)
6467 break;
6468 gcc_fallthrough ();
6469 CASE_FLT_FN (BUILT_IN_ISINF):
6470 CASE_FLT_FN (BUILT_IN_FINITE):
6471 case BUILT_IN_ISFINITE:
6472 case BUILT_IN_ISNORMAL:
6473 target = expand_builtin_interclass_mathfn (exp, target);
6474 if (target)
6475 return target;
6476 break;
6478 CASE_FLT_FN (BUILT_IN_ICEIL):
6479 CASE_FLT_FN (BUILT_IN_LCEIL):
6480 CASE_FLT_FN (BUILT_IN_LLCEIL):
6481 CASE_FLT_FN (BUILT_IN_LFLOOR):
6482 CASE_FLT_FN (BUILT_IN_IFLOOR):
6483 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6484 target = expand_builtin_int_roundingfn (exp, target);
6485 if (target)
6486 return target;
6487 break;
6489 CASE_FLT_FN (BUILT_IN_IRINT):
6490 CASE_FLT_FN (BUILT_IN_LRINT):
6491 CASE_FLT_FN (BUILT_IN_LLRINT):
6492 CASE_FLT_FN (BUILT_IN_IROUND):
6493 CASE_FLT_FN (BUILT_IN_LROUND):
6494 CASE_FLT_FN (BUILT_IN_LLROUND):
6495 target = expand_builtin_int_roundingfn_2 (exp, target);
6496 if (target)
6497 return target;
6498 break;
6500 CASE_FLT_FN (BUILT_IN_POWI):
6501 target = expand_builtin_powi (exp, target);
6502 if (target)
6503 return target;
6504 break;
6506 CASE_FLT_FN (BUILT_IN_CEXPI):
6507 target = expand_builtin_cexpi (exp, target);
6508 gcc_assert (target);
6509 return target;
6511 CASE_FLT_FN (BUILT_IN_SIN):
6512 CASE_FLT_FN (BUILT_IN_COS):
6513 if (! flag_unsafe_math_optimizations)
6514 break;
6515 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6516 if (target)
6517 return target;
6518 break;
6520 CASE_FLT_FN (BUILT_IN_SINCOS):
6521 if (! flag_unsafe_math_optimizations)
6522 break;
6523 target = expand_builtin_sincos (exp);
6524 if (target)
6525 return target;
6526 break;
6528 case BUILT_IN_APPLY_ARGS:
6529 return expand_builtin_apply_args ();
6531 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6532 FUNCTION with a copy of the parameters described by
6533 ARGUMENTS, and ARGSIZE. It returns a block of memory
6534 allocated on the stack into which is stored all the registers
6535 that might possibly be used for returning the result of a
6536 function. ARGUMENTS is the value returned by
6537 __builtin_apply_args. ARGSIZE is the number of bytes of
6538 arguments that must be copied. ??? How should this value be
6539 computed? We'll also need a safe worst case value for varargs
6540 functions. */
6541 case BUILT_IN_APPLY:
6542 if (!validate_arglist (exp, POINTER_TYPE,
6543 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6544 && !validate_arglist (exp, REFERENCE_TYPE,
6545 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6546 return const0_rtx;
6547 else
6549 rtx ops[3];
6551 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6552 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6553 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6555 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6558 /* __builtin_return (RESULT) causes the function to return the
6559 value described by RESULT. RESULT is address of the block of
6560 memory returned by __builtin_apply. */
6561 case BUILT_IN_RETURN:
6562 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6563 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6564 return const0_rtx;
6566 case BUILT_IN_SAVEREGS:
6567 return expand_builtin_saveregs ();
6569 case BUILT_IN_VA_ARG_PACK:
6570 /* All valid uses of __builtin_va_arg_pack () are removed during
6571 inlining. */
6572 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6573 return const0_rtx;
6575 case BUILT_IN_VA_ARG_PACK_LEN:
6576 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6577 inlining. */
6578 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6579 return const0_rtx;
6581 /* Return the address of the first anonymous stack arg. */
6582 case BUILT_IN_NEXT_ARG:
6583 if (fold_builtin_next_arg (exp, false))
6584 return const0_rtx;
6585 return expand_builtin_next_arg ();
6587 case BUILT_IN_CLEAR_CACHE:
6588 target = expand_builtin___clear_cache (exp);
6589 if (target)
6590 return target;
6591 break;
6593 case BUILT_IN_CLASSIFY_TYPE:
6594 return expand_builtin_classify_type (exp);
6596 case BUILT_IN_CONSTANT_P:
6597 return const0_rtx;
6599 case BUILT_IN_FRAME_ADDRESS:
6600 case BUILT_IN_RETURN_ADDRESS:
6601 return expand_builtin_frame_address (fndecl, exp);
6603 /* Returns the address of the area where the structure is returned.
6604 0 otherwise. */
6605 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6606 if (call_expr_nargs (exp) != 0
6607 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6608 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6609 return const0_rtx;
6610 else
6611 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6613 case BUILT_IN_ALLOCA:
6614 case BUILT_IN_ALLOCA_WITH_ALIGN:
6615 /* If the allocation stems from the declaration of a variable-sized
6616 object, it cannot accumulate. */
6617 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6618 if (target)
6619 return target;
6620 break;
6622 case BUILT_IN_STACK_SAVE:
6623 return expand_stack_save ();
6625 case BUILT_IN_STACK_RESTORE:
6626 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6627 return const0_rtx;
6629 case BUILT_IN_BSWAP16:
6630 case BUILT_IN_BSWAP32:
6631 case BUILT_IN_BSWAP64:
6632 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6633 if (target)
6634 return target;
6635 break;
6637 CASE_INT_FN (BUILT_IN_FFS):
6638 target = expand_builtin_unop (target_mode, exp, target,
6639 subtarget, ffs_optab);
6640 if (target)
6641 return target;
6642 break;
6644 CASE_INT_FN (BUILT_IN_CLZ):
6645 target = expand_builtin_unop (target_mode, exp, target,
6646 subtarget, clz_optab);
6647 if (target)
6648 return target;
6649 break;
6651 CASE_INT_FN (BUILT_IN_CTZ):
6652 target = expand_builtin_unop (target_mode, exp, target,
6653 subtarget, ctz_optab);
6654 if (target)
6655 return target;
6656 break;
6658 CASE_INT_FN (BUILT_IN_CLRSB):
6659 target = expand_builtin_unop (target_mode, exp, target,
6660 subtarget, clrsb_optab);
6661 if (target)
6662 return target;
6663 break;
6665 CASE_INT_FN (BUILT_IN_POPCOUNT):
6666 target = expand_builtin_unop (target_mode, exp, target,
6667 subtarget, popcount_optab);
6668 if (target)
6669 return target;
6670 break;
6672 CASE_INT_FN (BUILT_IN_PARITY):
6673 target = expand_builtin_unop (target_mode, exp, target,
6674 subtarget, parity_optab);
6675 if (target)
6676 return target;
6677 break;
6679 case BUILT_IN_STRLEN:
6680 target = expand_builtin_strlen (exp, target, target_mode);
6681 if (target)
6682 return target;
6683 break;
6685 case BUILT_IN_STRCAT:
6686 target = expand_builtin_strcat (exp, target);
6687 if (target)
6688 return target;
6689 break;
6691 case BUILT_IN_STRCPY:
6692 target = expand_builtin_strcpy (exp, target);
6693 if (target)
6694 return target;
6695 break;
6697 case BUILT_IN_STRNCAT:
6698 target = expand_builtin_strncat (exp, target);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_STRNCPY:
6704 target = expand_builtin_strncpy (exp, target);
6705 if (target)
6706 return target;
6707 break;
6709 case BUILT_IN_STPCPY:
6710 target = expand_builtin_stpcpy (exp, target, mode);
6711 if (target)
6712 return target;
6713 break;
6715 case BUILT_IN_MEMCPY:
6716 target = expand_builtin_memcpy (exp, target);
6717 if (target)
6718 return target;
6719 break;
6721 case BUILT_IN_MEMPCPY:
6722 target = expand_builtin_mempcpy (exp, target, mode);
6723 if (target)
6724 return target;
6725 break;
6727 case BUILT_IN_MEMSET:
6728 target = expand_builtin_memset (exp, target, mode);
6729 if (target)
6730 return target;
6731 break;
6733 case BUILT_IN_BZERO:
6734 target = expand_builtin_bzero (exp);
6735 if (target)
6736 return target;
6737 break;
6739 case BUILT_IN_STRCMP:
6740 target = expand_builtin_strcmp (exp, target);
6741 if (target)
6742 return target;
6743 break;
6745 case BUILT_IN_STRNCMP:
6746 target = expand_builtin_strncmp (exp, target, mode);
6747 if (target)
6748 return target;
6749 break;
6751 case BUILT_IN_BCMP:
6752 case BUILT_IN_MEMCMP:
6753 case BUILT_IN_MEMCMP_EQ:
6754 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6755 if (target)
6756 return target;
6757 if (fcode == BUILT_IN_MEMCMP_EQ)
6759 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6760 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6762 break;
6764 case BUILT_IN_SETJMP:
6765 /* This should have been lowered to the builtins below. */
6766 gcc_unreachable ();
6768 case BUILT_IN_SETJMP_SETUP:
6769 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6770 and the receiver label. */
6771 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6773 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6774 VOIDmode, EXPAND_NORMAL);
6775 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6776 rtx_insn *label_r = label_rtx (label);
6778 /* This is copied from the handling of non-local gotos. */
6779 expand_builtin_setjmp_setup (buf_addr, label_r);
6780 nonlocal_goto_handler_labels
6781 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6782 nonlocal_goto_handler_labels);
6783 /* ??? Do not let expand_label treat us as such since we would
6784 not want to be both on the list of non-local labels and on
6785 the list of forced labels. */
6786 FORCED_LABEL (label) = 0;
6787 return const0_rtx;
6789 break;
6791 case BUILT_IN_SETJMP_RECEIVER:
6792 /* __builtin_setjmp_receiver is passed the receiver label. */
6793 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6795 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6796 rtx_insn *label_r = label_rtx (label);
6798 expand_builtin_setjmp_receiver (label_r);
6799 return const0_rtx;
6801 break;
6803 /* __builtin_longjmp is passed a pointer to an array of five words.
6804 It's similar to the C library longjmp function but works with
6805 __builtin_setjmp above. */
6806 case BUILT_IN_LONGJMP:
6807 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6809 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6810 VOIDmode, EXPAND_NORMAL);
6811 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6813 if (value != const1_rtx)
6815 error ("%<__builtin_longjmp%> second argument must be 1");
6816 return const0_rtx;
6819 expand_builtin_longjmp (buf_addr, value);
6820 return const0_rtx;
6822 break;
6824 case BUILT_IN_NONLOCAL_GOTO:
6825 target = expand_builtin_nonlocal_goto (exp);
6826 if (target)
6827 return target;
6828 break;
6830 /* This updates the setjmp buffer that is its argument with the value
6831 of the current stack pointer. */
6832 case BUILT_IN_UPDATE_SETJMP_BUF:
6833 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6835 rtx buf_addr
6836 = expand_normal (CALL_EXPR_ARG (exp, 0));
6838 expand_builtin_update_setjmp_buf (buf_addr);
6839 return const0_rtx;
6841 break;
6843 case BUILT_IN_TRAP:
6844 expand_builtin_trap ();
6845 return const0_rtx;
6847 case BUILT_IN_UNREACHABLE:
6848 expand_builtin_unreachable ();
6849 return const0_rtx;
6851 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6852 case BUILT_IN_SIGNBITD32:
6853 case BUILT_IN_SIGNBITD64:
6854 case BUILT_IN_SIGNBITD128:
6855 target = expand_builtin_signbit (exp, target);
6856 if (target)
6857 return target;
6858 break;
6860 /* Various hooks for the DWARF 2 __throw routine. */
6861 case BUILT_IN_UNWIND_INIT:
6862 expand_builtin_unwind_init ();
6863 return const0_rtx;
6864 case BUILT_IN_DWARF_CFA:
6865 return virtual_cfa_rtx;
6866 #ifdef DWARF2_UNWIND_INFO
6867 case BUILT_IN_DWARF_SP_COLUMN:
6868 return expand_builtin_dwarf_sp_column ();
6869 case BUILT_IN_INIT_DWARF_REG_SIZES:
6870 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6871 return const0_rtx;
6872 #endif
6873 case BUILT_IN_FROB_RETURN_ADDR:
6874 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6875 case BUILT_IN_EXTRACT_RETURN_ADDR:
6876 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6877 case BUILT_IN_EH_RETURN:
6878 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6879 CALL_EXPR_ARG (exp, 1));
6880 return const0_rtx;
6881 case BUILT_IN_EH_RETURN_DATA_REGNO:
6882 return expand_builtin_eh_return_data_regno (exp);
6883 case BUILT_IN_EXTEND_POINTER:
6884 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6885 case BUILT_IN_EH_POINTER:
6886 return expand_builtin_eh_pointer (exp);
6887 case BUILT_IN_EH_FILTER:
6888 return expand_builtin_eh_filter (exp);
6889 case BUILT_IN_EH_COPY_VALUES:
6890 return expand_builtin_eh_copy_values (exp);
6892 case BUILT_IN_VA_START:
6893 return expand_builtin_va_start (exp);
6894 case BUILT_IN_VA_END:
6895 return expand_builtin_va_end (exp);
6896 case BUILT_IN_VA_COPY:
6897 return expand_builtin_va_copy (exp);
6898 case BUILT_IN_EXPECT:
6899 return expand_builtin_expect (exp, target);
6900 case BUILT_IN_ASSUME_ALIGNED:
6901 return expand_builtin_assume_aligned (exp, target);
6902 case BUILT_IN_PREFETCH:
6903 expand_builtin_prefetch (exp);
6904 return const0_rtx;
6906 case BUILT_IN_INIT_TRAMPOLINE:
6907 return expand_builtin_init_trampoline (exp, true);
6908 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6909 return expand_builtin_init_trampoline (exp, false);
6910 case BUILT_IN_ADJUST_TRAMPOLINE:
6911 return expand_builtin_adjust_trampoline (exp);
6913 case BUILT_IN_INIT_DESCRIPTOR:
6914 return expand_builtin_init_descriptor (exp);
6915 case BUILT_IN_ADJUST_DESCRIPTOR:
6916 return expand_builtin_adjust_descriptor (exp);
6918 case BUILT_IN_FORK:
6919 case BUILT_IN_EXECL:
6920 case BUILT_IN_EXECV:
6921 case BUILT_IN_EXECLP:
6922 case BUILT_IN_EXECLE:
6923 case BUILT_IN_EXECVP:
6924 case BUILT_IN_EXECVE:
6925 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6926 if (target)
6927 return target;
6928 break;
6930 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6931 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6932 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6933 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6934 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6936 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6937 if (target)
6938 return target;
6939 break;
6941 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6942 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6943 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6944 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6945 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6946 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6947 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6948 if (target)
6949 return target;
6950 break;
6952 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6953 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6954 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6955 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6956 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6957 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6958 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6959 if (target)
6960 return target;
6961 break;
6963 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6964 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6965 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6966 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6967 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6968 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6969 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6970 if (target)
6971 return target;
6972 break;
6974 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6975 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6976 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6977 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6978 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6980 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6981 if (target)
6982 return target;
6983 break;
6985 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6986 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6987 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6988 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6989 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6990 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6991 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6992 if (target)
6993 return target;
6994 break;
6996 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6997 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6998 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6999 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7000 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7002 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7003 if (target)
7004 return target;
7005 break;
7007 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7008 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7009 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7010 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7011 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7012 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7013 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7014 if (target)
7015 return target;
7016 break;
7018 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7019 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7020 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7021 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7022 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7023 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7024 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7025 if (target)
7026 return target;
7027 break;
7029 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7030 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7031 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7032 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7033 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7034 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7035 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7036 if (target)
7037 return target;
7038 break;
7040 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7041 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7042 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7043 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7044 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7045 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7046 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7047 if (target)
7048 return target;
7049 break;
7051 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7052 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7053 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7054 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7055 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7056 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7057 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7058 if (target)
7059 return target;
7060 break;
7062 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7063 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7064 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7065 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7066 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7067 if (mode == VOIDmode)
7068 mode = TYPE_MODE (boolean_type_node);
7069 if (!target || !register_operand (target, mode))
7070 target = gen_reg_rtx (mode);
7072 mode = get_builtin_sync_mode
7073 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7074 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7075 if (target)
7076 return target;
7077 break;
7079 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7080 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7081 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7082 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7083 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7084 mode = get_builtin_sync_mode
7085 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7086 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7087 if (target)
7088 return target;
7089 break;
7091 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7092 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7093 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7094 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7095 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7096 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7097 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7098 if (target)
7099 return target;
7100 break;
7102 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7103 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7104 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7105 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7106 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7108 expand_builtin_sync_lock_release (mode, exp);
7109 return const0_rtx;
7111 case BUILT_IN_SYNC_SYNCHRONIZE:
7112 expand_builtin_sync_synchronize ();
7113 return const0_rtx;
7115 case BUILT_IN_ATOMIC_EXCHANGE_1:
7116 case BUILT_IN_ATOMIC_EXCHANGE_2:
7117 case BUILT_IN_ATOMIC_EXCHANGE_4:
7118 case BUILT_IN_ATOMIC_EXCHANGE_8:
7119 case BUILT_IN_ATOMIC_EXCHANGE_16:
7120 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7121 target = expand_builtin_atomic_exchange (mode, exp, target);
7122 if (target)
7123 return target;
7124 break;
7126 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7127 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7128 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7129 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7130 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7132 unsigned int nargs, z;
7133 vec<tree, va_gc> *vec;
7135 mode =
7136 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7137 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7138 if (target)
7139 return target;
7141 /* If this is turned into an external library call, the weak parameter
7142 must be dropped to match the expected parameter list. */
7143 nargs = call_expr_nargs (exp);
7144 vec_alloc (vec, nargs - 1);
7145 for (z = 0; z < 3; z++)
7146 vec->quick_push (CALL_EXPR_ARG (exp, z));
7147 /* Skip the boolean weak parameter. */
7148 for (z = 4; z < 6; z++)
7149 vec->quick_push (CALL_EXPR_ARG (exp, z));
7150 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7151 break;
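/* Illustrative sketch of the argument rewrite above (editor's note, not part
   of GCC; shown with the 4-byte variant and assuming the out-of-line
   libatomic routine of the same name, which takes no "weak" argument):

     __atomic_compare_exchange_4 (ptr, expected, desired, weak,
                                  success_order, failure_order)

   is rebuilt from arguments 0-2 and 4-5 only, i.e. as

     __atomic_compare_exchange_4 (ptr, expected, desired,
                                  success_order, failure_order)

   so the library's expected parameter list is matched.  */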
7154 case BUILT_IN_ATOMIC_LOAD_1:
7155 case BUILT_IN_ATOMIC_LOAD_2:
7156 case BUILT_IN_ATOMIC_LOAD_4:
7157 case BUILT_IN_ATOMIC_LOAD_8:
7158 case BUILT_IN_ATOMIC_LOAD_16:
7159 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7160 target = expand_builtin_atomic_load (mode, exp, target);
7161 if (target)
7162 return target;
7163 break;
7165 case BUILT_IN_ATOMIC_STORE_1:
7166 case BUILT_IN_ATOMIC_STORE_2:
7167 case BUILT_IN_ATOMIC_STORE_4:
7168 case BUILT_IN_ATOMIC_STORE_8:
7169 case BUILT_IN_ATOMIC_STORE_16:
7170 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7171 target = expand_builtin_atomic_store (mode, exp);
7172 if (target)
7173 return const0_rtx;
7174 break;
7176 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7177 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7178 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7179 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7180 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7182 enum built_in_function lib;
7183 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7184 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7185 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7186 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7187 ignore, lib);
7188 if (target)
7189 return target;
7190 break;
7192 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7193 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7194 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7195 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7196 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7198 enum built_in_function lib;
7199 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7200 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7201 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7202 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7203 ignore, lib);
7204 if (target)
7205 return target;
7206 break;
7208 case BUILT_IN_ATOMIC_AND_FETCH_1:
7209 case BUILT_IN_ATOMIC_AND_FETCH_2:
7210 case BUILT_IN_ATOMIC_AND_FETCH_4:
7211 case BUILT_IN_ATOMIC_AND_FETCH_8:
7212 case BUILT_IN_ATOMIC_AND_FETCH_16:
7214 enum built_in_function lib;
7215 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7216 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7217 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7218 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7219 ignore, lib);
7220 if (target)
7221 return target;
7222 break;
7224 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7225 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7226 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7227 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7228 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7230 enum built_in_function lib;
7231 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7232 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7233 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7234 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7235 ignore, lib);
7236 if (target)
7237 return target;
7238 break;
7240 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7241 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7242 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7243 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7244 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7246 enum built_in_function lib;
7247 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7248 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7249 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7250 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7251 ignore, lib);
7252 if (target)
7253 return target;
7254 break;
7256 case BUILT_IN_ATOMIC_OR_FETCH_1:
7257 case BUILT_IN_ATOMIC_OR_FETCH_2:
7258 case BUILT_IN_ATOMIC_OR_FETCH_4:
7259 case BUILT_IN_ATOMIC_OR_FETCH_8:
7260 case BUILT_IN_ATOMIC_OR_FETCH_16:
7262 enum built_in_function lib;
7263 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7264 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7265 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7266 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7267 ignore, lib);
7268 if (target)
7269 return target;
7270 break;
7272 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7273 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7274 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7275 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7276 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7277 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7278 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7279 ignore, BUILT_IN_NONE);
7280 if (target)
7281 return target;
7282 break;
7284 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7285 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7286 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7287 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7288 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7289 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7290 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7291 ignore, BUILT_IN_NONE);
7292 if (target)
7293 return target;
7294 break;
7296 case BUILT_IN_ATOMIC_FETCH_AND_1:
7297 case BUILT_IN_ATOMIC_FETCH_AND_2:
7298 case BUILT_IN_ATOMIC_FETCH_AND_4:
7299 case BUILT_IN_ATOMIC_FETCH_AND_8:
7300 case BUILT_IN_ATOMIC_FETCH_AND_16:
7301 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7302 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7303 ignore, BUILT_IN_NONE);
7304 if (target)
7305 return target;
7306 break;
7308 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7309 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7310 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7311 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7312 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7313 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7314 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7315 ignore, BUILT_IN_NONE);
7316 if (target)
7317 return target;
7318 break;
7320 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7321 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7322 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7323 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7324 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7325 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7326 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7327 ignore, BUILT_IN_NONE);
7328 if (target)
7329 return target;
7330 break;
7332 case BUILT_IN_ATOMIC_FETCH_OR_1:
7333 case BUILT_IN_ATOMIC_FETCH_OR_2:
7334 case BUILT_IN_ATOMIC_FETCH_OR_4:
7335 case BUILT_IN_ATOMIC_FETCH_OR_8:
7336 case BUILT_IN_ATOMIC_FETCH_OR_16:
7337 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7338 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7339 ignore, BUILT_IN_NONE);
7340 if (target)
7341 return target;
7342 break;
7344 case BUILT_IN_ATOMIC_TEST_AND_SET:
7345 return expand_builtin_atomic_test_and_set (exp, target);
7347 case BUILT_IN_ATOMIC_CLEAR:
7348 return expand_builtin_atomic_clear (exp);
7350 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7351 return expand_builtin_atomic_always_lock_free (exp);
7353 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7354 target = expand_builtin_atomic_is_lock_free (exp);
7355 if (target)
7356 return target;
7357 break;
7359 case BUILT_IN_ATOMIC_THREAD_FENCE:
7360 expand_builtin_atomic_thread_fence (exp);
7361 return const0_rtx;
7363 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7364 expand_builtin_atomic_signal_fence (exp);
7365 return const0_rtx;
7367 case BUILT_IN_OBJECT_SIZE:
7368 return expand_builtin_object_size (exp);
7370 case BUILT_IN_MEMCPY_CHK:
7371 case BUILT_IN_MEMPCPY_CHK:
7372 case BUILT_IN_MEMMOVE_CHK:
7373 case BUILT_IN_MEMSET_CHK:
7374 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7375 if (target)
7376 return target;
7377 break;
7379 case BUILT_IN_STRCPY_CHK:
7380 case BUILT_IN_STPCPY_CHK:
7381 case BUILT_IN_STRNCPY_CHK:
7382 case BUILT_IN_STPNCPY_CHK:
7383 case BUILT_IN_STRCAT_CHK:
7384 case BUILT_IN_STRNCAT_CHK:
7385 case BUILT_IN_SNPRINTF_CHK:
7386 case BUILT_IN_VSNPRINTF_CHK:
7387 maybe_emit_chk_warning (exp, fcode);
7388 break;
7390 case BUILT_IN_SPRINTF_CHK:
7391 case BUILT_IN_VSPRINTF_CHK:
7392 maybe_emit_sprintf_chk_warning (exp, fcode);
7393 break;
7395 case BUILT_IN_FREE:
7396 if (warn_free_nonheap_object)
7397 maybe_emit_free_warning (exp);
7398 break;
7400 case BUILT_IN_THREAD_POINTER:
7401 return expand_builtin_thread_pointer (exp, target);
7403 case BUILT_IN_SET_THREAD_POINTER:
7404 expand_builtin_set_thread_pointer (exp);
7405 return const0_rtx;
7407 case BUILT_IN_CILK_DETACH:
7408 expand_builtin_cilk_detach (exp);
7409 return const0_rtx;
7411 case BUILT_IN_CILK_POP_FRAME:
7412 expand_builtin_cilk_pop_frame (exp);
7413 return const0_rtx;
7415 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7416 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7417 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7418 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7419 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7420 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7421 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7422 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7423 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7424 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7425 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7426 /* We allow user CHKP builtins if Pointer Bounds
7427 Checker is off. */
7428 if (!chkp_function_instrumented_p (current_function_decl))
7430 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7431 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7432 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7433 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7434 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7435 return expand_normal (CALL_EXPR_ARG (exp, 0));
7436 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7437 return expand_normal (size_zero_node);
7438 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7439 return expand_normal (size_int (-1));
7440 else
7441 return const0_rtx;
7443 /* FALLTHROUGH */
7445 case BUILT_IN_CHKP_BNDMK:
7446 case BUILT_IN_CHKP_BNDSTX:
7447 case BUILT_IN_CHKP_BNDCL:
7448 case BUILT_IN_CHKP_BNDCU:
7449 case BUILT_IN_CHKP_BNDLDX:
7450 case BUILT_IN_CHKP_BNDRET:
7451 case BUILT_IN_CHKP_INTERSECT:
7452 case BUILT_IN_CHKP_NARROW:
7453 case BUILT_IN_CHKP_EXTRACT_LOWER:
7454 case BUILT_IN_CHKP_EXTRACT_UPPER:
7455 /* Software implementation of Pointer Bounds Checker is NYI.
7456 Target support is required. */
7457 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7458 break;
7460 case BUILT_IN_ACC_ON_DEVICE:
7461 /* Fall back to a library call if we failed to expand the builtin
7462 when folding. */
7463 break;
7465 default: /* Just do a library call for any unknown builtin. */
7466 break;
7469 /* The switch statement above can drop through to cause the function
7470 to be called normally. */
7471 return expand_call (exp, target, ignore);
7474 /* Similar to expand_builtin but is used for instrumented calls. */
7477 expand_builtin_with_bounds (tree exp, rtx target,
7478 rtx subtarget ATTRIBUTE_UNUSED,
7479 machine_mode mode, int ignore)
7481 tree fndecl = get_callee_fndecl (exp);
7482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7484 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7486 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7487 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7489 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7490 && fcode < END_CHKP_BUILTINS);
7492 switch (fcode)
7494 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7495 target = expand_builtin_memcpy_with_bounds (exp, target);
7496 if (target)
7497 return target;
7498 break;
7500 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7501 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7502 if (target)
7503 return target;
7504 break;
7506 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7507 target = expand_builtin_memset_with_bounds (exp, target, mode);
7508 if (target)
7509 return target;
7510 break;
7512 default:
7513 break;
7516 /* The switch statement above can drop through to cause the function
7517 to be called normally. */
7518 return expand_call (exp, target, ignore);
7521 /* Determine whether a tree node represents a call to a built-in
7522 function. If the tree T is a call to a built-in function with
7523 the right number of arguments of the appropriate types, return
7524 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7525 Otherwise the return value is END_BUILTINS. */
7527 enum built_in_function
7528 builtin_mathfn_code (const_tree t)
7530 const_tree fndecl, arg, parmlist;
7531 const_tree argtype, parmtype;
7532 const_call_expr_arg_iterator iter;
7534 if (TREE_CODE (t) != CALL_EXPR
7535 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7536 return END_BUILTINS;
7538 fndecl = get_callee_fndecl (t);
7539 if (fndecl == NULL_TREE
7540 || TREE_CODE (fndecl) != FUNCTION_DECL
7541 || ! DECL_BUILT_IN (fndecl)
7542 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7543 return END_BUILTINS;
7545 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7546 init_const_call_expr_arg_iterator (t, &iter);
7547 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7549 /* If a function doesn't take a variable number of arguments,
7550 the last element in the list will have type `void'. */
7551 parmtype = TREE_VALUE (parmlist);
7552 if (VOID_TYPE_P (parmtype))
7554 if (more_const_call_expr_args_p (&iter))
7555 return END_BUILTINS;
7556 return DECL_FUNCTION_CODE (fndecl);
7559 if (! more_const_call_expr_args_p (&iter))
7560 return END_BUILTINS;
7562 arg = next_const_call_expr_arg (&iter);
7563 argtype = TREE_TYPE (arg);
7565 if (SCALAR_FLOAT_TYPE_P (parmtype))
7567 if (! SCALAR_FLOAT_TYPE_P (argtype))
7568 return END_BUILTINS;
7570 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7572 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7573 return END_BUILTINS;
7575 else if (POINTER_TYPE_P (parmtype))
7577 if (! POINTER_TYPE_P (argtype))
7578 return END_BUILTINS;
7580 else if (INTEGRAL_TYPE_P (parmtype))
7582 if (! INTEGRAL_TYPE_P (argtype))
7583 return END_BUILTINS;
7585 else
7586 return END_BUILTINS;
7589 /* Variable-length argument list. */
7590 return DECL_FUNCTION_CODE (fndecl);
7593 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7594 evaluate to a constant. */
7596 static tree
7597 fold_builtin_constant_p (tree arg)
7599 /* We return 1 for a numeric type that's known to be a constant
7600 value at compile-time or for an aggregate type that's a
7601 literal constant. */
7602 STRIP_NOPS (arg);
7604 /* If we know this is a constant, return the constant one. */
7605 if (CONSTANT_CLASS_P (arg)
7606 || (TREE_CODE (arg) == CONSTRUCTOR
7607 && TREE_CONSTANT (arg)))
7608 return integer_one_node;
7609 if (TREE_CODE (arg) == ADDR_EXPR)
7611 tree op = TREE_OPERAND (arg, 0);
7612 if (TREE_CODE (op) == STRING_CST
7613 || (TREE_CODE (op) == ARRAY_REF
7614 && integer_zerop (TREE_OPERAND (op, 1))
7615 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7616 return integer_one_node;
7619 /* If this expression has side effects, show we don't know it to be a
7620 constant. Likewise if it's a pointer or aggregate type since in
7621 those cases we only want literals, since those are only optimized
7622 when generating RTL, not later.
7623 And finally, if we are compiling an initializer, not code, we
7624 need to return a definite result now; there's not going to be any
7625 more optimization done. */
7626 if (TREE_SIDE_EFFECTS (arg)
7627 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7628 || POINTER_TYPE_P (TREE_TYPE (arg))
7629 || cfun == 0
7630 || folding_initializer
7631 || force_folding_builtin_constant_p)
7632 return integer_zero_node;
7634 return NULL_TREE;
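/* Worked examples for the folder above (illustrative only):
     __builtin_constant_p (3)     -> integer_one_node (folds to 1)
     __builtin_constant_p ("abc") -> integer_one_node (ADDR_EXPR of a
                                     STRING_CST)
     __builtin_constant_p (i++)   -> integer_zero_node (side effects)
     __builtin_constant_p (ptr)   -> integer_zero_node (pointer type)
   Anything else returns NULL_TREE so later passes can still decide.  */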
7637 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7638 return it as a truthvalue. */
7640 static tree
7641 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7642 tree predictor)
7644 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7646 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7647 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7648 ret_type = TREE_TYPE (TREE_TYPE (fn));
7649 pred_type = TREE_VALUE (arg_types);
7650 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7652 pred = fold_convert_loc (loc, pred_type, pred);
7653 expected = fold_convert_loc (loc, expected_type, expected);
7654 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7655 predictor);
7657 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7658 build_int_cst (ret_type, 0));
7661 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7662 NULL_TREE if no simplification is possible. */
7664 tree
7665 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7667 tree inner, fndecl, inner_arg0;
7668 enum tree_code code;
7670 /* Distribute the expected value over short-circuiting operators.
7671 See through the cast from truthvalue_type_node to long. */
7672 inner_arg0 = arg0;
7673 while (CONVERT_EXPR_P (inner_arg0)
7674 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7675 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7676 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7678 /* If this is a builtin_expect within a builtin_expect keep the
7679 inner one. See through a comparison against a constant. It
7680 might have been added to create a truthvalue. */
7681 inner = inner_arg0;
7683 if (COMPARISON_CLASS_P (inner)
7684 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7685 inner = TREE_OPERAND (inner, 0);
7687 if (TREE_CODE (inner) == CALL_EXPR
7688 && (fndecl = get_callee_fndecl (inner))
7689 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7690 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7691 return arg0;
7693 inner = inner_arg0;
7694 code = TREE_CODE (inner);
7695 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7697 tree op0 = TREE_OPERAND (inner, 0);
7698 tree op1 = TREE_OPERAND (inner, 1);
7700 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7701 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7702 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7704 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7707 /* If the argument isn't invariant then there's nothing else we can do. */
7708 if (!TREE_CONSTANT (inner_arg0))
7709 return NULL_TREE;
7711 /* If we expect that a comparison against the argument will fold to
7712 a constant return the constant. In practice, this means a true
7713 constant or the address of a non-weak symbol. */
7714 inner = inner_arg0;
7715 STRIP_NOPS (inner);
7716 if (TREE_CODE (inner) == ADDR_EXPR)
7720 do inner = TREE_OPERAND (inner, 0);
7722 while (TREE_CODE (inner) == COMPONENT_REF
7723 || TREE_CODE (inner) == ARRAY_REF);
7724 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7725 return NULL_TREE;
7728 /* Otherwise, ARG0 already has the proper type for the return value. */
7729 return arg0;
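/* Illustrative example (editor's sketch): with short-circuit operands,

     __builtin_expect (a && b, 1)

   is folded into roughly

     (long) ((__builtin_expect (a, 1) != 0)
             && (__builtin_expect (b, 1) != 0))

   so the expectation is attached to each arm of the TRUTH_ANDIF_EXPR.  */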
7732 /* Fold a call to __builtin_classify_type with argument ARG. */
7734 static tree
7735 fold_builtin_classify_type (tree arg)
7737 if (arg == 0)
7738 return build_int_cst (integer_type_node, no_type_class);
7740 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7743 /* Fold a call to __builtin_strlen with argument ARG. */
7745 static tree
7746 fold_builtin_strlen (location_t loc, tree type, tree arg)
7748 if (!validate_arg (arg, POINTER_TYPE))
7749 return NULL_TREE;
7750 else
7752 tree len = c_strlen (arg, 0);
7754 if (len)
7755 return fold_convert_loc (loc, type, len);
7757 return NULL_TREE;
7761 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7763 static tree
7764 fold_builtin_inf (location_t loc, tree type, int warn)
7766 REAL_VALUE_TYPE real;
7768 /* __builtin_inff is intended to be usable to define INFINITY on all
7769 targets. If an infinity is not available, INFINITY expands "to a
7770 positive constant of type float that overflows at translation
7771 time", footnote "In this case, using INFINITY will violate the
7772 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7773 Thus we pedwarn to ensure this constraint violation is
7774 diagnosed. */
7775 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7776 pedwarn (loc, 0, "target format does not support infinity");
7778 real_inf (&real);
7779 return build_real (type, real);
7782 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7783 NULL_TREE if no simplification can be made. */
7785 static tree
7786 fold_builtin_sincos (location_t loc,
7787 tree arg0, tree arg1, tree arg2)
7789 tree type;
7790 tree fndecl, call = NULL_TREE;
7792 if (!validate_arg (arg0, REAL_TYPE)
7793 || !validate_arg (arg1, POINTER_TYPE)
7794 || !validate_arg (arg2, POINTER_TYPE))
7795 return NULL_TREE;
7797 type = TREE_TYPE (arg0);
7799 /* Canonicalize sincos to cexpi: look up the cexpi variant for TYPE. */
7800 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7801 if (fn == END_BUILTINS)
7802 return NULL_TREE;
7804 /* Calculate the result when the argument is a constant. */
7805 if (TREE_CODE (arg0) == REAL_CST)
7807 tree complex_type = build_complex_type (type);
7808 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7810 if (!call)
7812 if (!targetm.libc_has_function (function_c99_math_complex)
7813 || !builtin_decl_implicit_p (fn))
7814 return NULL_TREE;
7815 fndecl = builtin_decl_explicit (fn);
7816 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7817 call = builtin_save_expr (call);
7820 return build2 (COMPOUND_EXPR, void_type_node,
7821 build2 (MODIFY_EXPR, void_type_node,
7822 build_fold_indirect_ref_loc (loc, arg1),
7823 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7824 build2 (MODIFY_EXPR, void_type_node,
7825 build_fold_indirect_ref_loc (loc, arg2),
7826 fold_build1_loc (loc, REALPART_EXPR, type, call)));
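/* Illustrative example (editor's sketch): for

     sincos (x, &s, &c);

   the folding above builds the equivalent of

     tmp = cexpi (x);      // cexpi (x) = cos (x) + i*sin (x)
     s = __imag__ tmp;
     c = __real__ tmp;

   expressed as a COMPOUND_EXPR of two MODIFY_EXPRs, with TMP either a folded
   constant (for a REAL_CST argument) or a saved call to cexpi.  */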
7829 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7830 Return NULL_TREE if no simplification can be made. */
7832 static tree
7833 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7835 if (!validate_arg (arg1, POINTER_TYPE)
7836 || !validate_arg (arg2, POINTER_TYPE)
7837 || !validate_arg (len, INTEGER_TYPE))
7838 return NULL_TREE;
7840 /* If the LEN parameter is zero, return zero. */
7841 if (integer_zerop (len))
7842 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7843 arg1, arg2);
7845 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7846 if (operand_equal_p (arg1, arg2, 0))
7847 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7849 /* If the LEN parameter is one, return an expression corresponding to
7850 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7851 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7853 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7854 tree cst_uchar_ptr_node
7855 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7857 tree ind1
7858 = fold_convert_loc (loc, integer_type_node,
7859 build1 (INDIRECT_REF, cst_uchar_node,
7860 fold_convert_loc (loc,
7861 cst_uchar_ptr_node,
7862 arg1)));
7863 tree ind2
7864 = fold_convert_loc (loc, integer_type_node,
7865 build1 (INDIRECT_REF, cst_uchar_node,
7866 fold_convert_loc (loc,
7867 cst_uchar_ptr_node,
7868 arg2)));
7869 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7872 return NULL_TREE;
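/* Worked examples for the folding above (illustrative only):
     memcmp (p, q, 0) -> 0
     memcmp (p, p, n) -> 0
     memcmp (p, q, 1) -> (int) *(const unsigned char *) p
                         - (int) *(const unsigned char *) q  */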
7875 /* Fold a call to builtin isascii with argument ARG. */
7877 static tree
7878 fold_builtin_isascii (location_t loc, tree arg)
7880 if (!validate_arg (arg, INTEGER_TYPE))
7881 return NULL_TREE;
7882 else
7884 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7885 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7886 build_int_cst (integer_type_node,
7887 ~ (unsigned HOST_WIDE_INT) 0x7f));
7888 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7889 arg, integer_zero_node);
7893 /* Fold a call to builtin toascii with argument ARG. */
7895 static tree
7896 fold_builtin_toascii (location_t loc, tree arg)
7898 if (!validate_arg (arg, INTEGER_TYPE))
7899 return NULL_TREE;
7901 /* Transform toascii(c) -> (c & 0x7f). */
7902 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7903 build_int_cst (integer_type_node, 0x7f));
7906 /* Fold a call to builtin isdigit with argument ARG. */
7908 static tree
7909 fold_builtin_isdigit (location_t loc, tree arg)
7911 if (!validate_arg (arg, INTEGER_TYPE))
7912 return NULL_TREE;
7913 else
7915 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7916 /* According to the C standard, isdigit is unaffected by locale.
7917 However, it definitely is affected by the target character set. */
7918 unsigned HOST_WIDE_INT target_digit0
7919 = lang_hooks.to_target_charset ('0');
7921 if (target_digit0 == 0)
7922 return NULL_TREE;
7924 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7925 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7926 build_int_cst (unsigned_type_node, target_digit0));
7927 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7928 build_int_cst (unsigned_type_node, 9));
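/* Worked example (illustrative, assuming an execution character set where
   '0' == 48): isdigit (c) becomes

     (unsigned) c - 48 <= 9

   which is 1 exactly for c in '0' .. '9'; if the target '0' cannot be
   determined the folding is skipped.  */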
7932 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7934 static tree
7935 fold_builtin_fabs (location_t loc, tree arg, tree type)
7937 if (!validate_arg (arg, REAL_TYPE))
7938 return NULL_TREE;
7940 arg = fold_convert_loc (loc, type, arg);
7941 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7944 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7946 static tree
7947 fold_builtin_abs (location_t loc, tree arg, tree type)
7949 if (!validate_arg (arg, INTEGER_TYPE))
7950 return NULL_TREE;
7952 arg = fold_convert_loc (loc, type, arg);
7953 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7956 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7958 static tree
7959 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7961 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7962 if (validate_arg (arg0, REAL_TYPE)
7963 && validate_arg (arg1, REAL_TYPE)
7964 && validate_arg (arg2, REAL_TYPE)
7965 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7966 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7968 return NULL_TREE;
7971 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7973 static tree
7974 fold_builtin_carg (location_t loc, tree arg, tree type)
7976 if (validate_arg (arg, COMPLEX_TYPE)
7977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7979 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7981 if (atan2_fn)
7983 tree new_arg = builtin_save_expr (arg);
7984 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7985 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7986 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7990 return NULL_TREE;
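/* Illustrative example (editor's sketch): carg (z) for a complex double Z
   becomes

     atan2 (__imag__ z, __real__ z)

   with Z wrapped in a SAVE_EXPR so it is evaluated only once.  */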
7993 /* Fold a call to builtin frexp, we can assume the base is 2. */
7995 static tree
7996 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7998 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7999 return NULL_TREE;
8001 STRIP_NOPS (arg0);
8003 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8004 return NULL_TREE;
8006 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8008 /* Proceed if a valid pointer type was passed in. */
8009 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8011 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8012 tree frac, exp;
8014 switch (value->cl)
8016 case rvc_zero:
8017 /* For +-0, return (*exp = 0, +-0). */
8018 exp = integer_zero_node;
8019 frac = arg0;
8020 break;
8021 case rvc_nan:
8022 case rvc_inf:
8023 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8024 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8025 case rvc_normal:
8027 /* Since the frexp function always expects base 2, and in
8028 GCC normalized significands are already in the range
8029 [0.5, 1.0), we have exactly what frexp wants. */
8030 REAL_VALUE_TYPE frac_rvt = *value;
8031 SET_REAL_EXP (&frac_rvt, 0);
8032 frac = build_real (rettype, frac_rvt);
8033 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8035 break;
8036 default:
8037 gcc_unreachable ();
8040 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8041 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8042 TREE_SIDE_EFFECTS (arg1) = 1;
8043 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8046 return NULL_TREE;
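/* Worked example (illustrative only): for a constant argument,

     frexp (12.0, &e)

   folds to the compound expression (e = 4, 0.75), because
   12.0 == 0.75 * 2**4 with the fraction normalized to [0.5, 1.0).  */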
8049 /* Fold a call to builtin modf. */
8051 static tree
8052 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8054 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8055 return NULL_TREE;
8057 STRIP_NOPS (arg0);
8059 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8060 return NULL_TREE;
8062 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8064 /* Proceed if a valid pointer type was passed in. */
8065 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8067 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8068 REAL_VALUE_TYPE trunc, frac;
8070 switch (value->cl)
8072 case rvc_nan:
8073 case rvc_zero:
8074 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8075 trunc = frac = *value;
8076 break;
8077 case rvc_inf:
8078 /* For +-Inf, return (*arg1 = arg0, +-0). */
8079 frac = dconst0;
8080 frac.sign = value->sign;
8081 trunc = *value;
8082 break;
8083 case rvc_normal:
8084 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8085 real_trunc (&trunc, VOIDmode, value);
8086 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8087 /* If the original number was negative and already
8088 integral, then the fractional part is -0.0. */
8089 if (value->sign && frac.cl == rvc_zero)
8090 frac.sign = value->sign;
8091 break;
8094 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8095 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8096 build_real (rettype, trunc));
8097 TREE_SIDE_EFFECTS (arg1) = 1;
8098 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8099 build_real (rettype, frac));
8102 return NULL_TREE;
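/* Worked examples for the folding above (illustrative only):
     modf (2.5, &ip)  -> (ip = 2.0, 0.5)
     modf (-2.0, &ip) -> (ip = -2.0, -0.0)   negative and already integral,
                                             so the fraction keeps the sign
     modf (-inf, &ip) -> (ip = -inf, -0.0)  */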
8105 /* Given a location LOC, an interclass builtin function decl FNDECL
8106 and its single argument ARG, return a folded expression computing
8107 the same, or NULL_TREE if we either couldn't or didn't want to fold
8108 (the latter happens if there's an RTL instruction available). */
8110 static tree
8111 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8113 machine_mode mode;
8115 if (!validate_arg (arg, REAL_TYPE))
8116 return NULL_TREE;
8118 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8119 return NULL_TREE;
8121 mode = TYPE_MODE (TREE_TYPE (arg));
8123 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8125 /* If there is no optab, try generic code. */
8126 switch (DECL_FUNCTION_CODE (fndecl))
8128 tree result;
8130 CASE_FLT_FN (BUILT_IN_ISINF):
8132 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8133 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8134 tree type = TREE_TYPE (arg);
8135 REAL_VALUE_TYPE r;
8136 char buf[128];
8138 if (is_ibm_extended)
8140 /* NaN and Inf are encoded in the high-order double value
8141 only. The low-order value is not significant. */
8142 type = double_type_node;
8143 mode = DFmode;
8144 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8146 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8147 real_from_string (&r, buf);
8148 result = build_call_expr (isgr_fn, 2,
8149 fold_build1_loc (loc, ABS_EXPR, type, arg),
8150 build_real (type, r));
8151 return result;
8153 CASE_FLT_FN (BUILT_IN_FINITE):
8154 case BUILT_IN_ISFINITE:
8156 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8157 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8158 tree type = TREE_TYPE (arg);
8159 REAL_VALUE_TYPE r;
8160 char buf[128];
8162 if (is_ibm_extended)
8164 /* NaN and Inf are encoded in the high-order double value
8165 only. The low-order value is not significant. */
8166 type = double_type_node;
8167 mode = DFmode;
8168 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8170 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8171 real_from_string (&r, buf);
8172 result = build_call_expr (isle_fn, 2,
8173 fold_build1_loc (loc, ABS_EXPR, type, arg),
8174 build_real (type, r));
8175 /*result = fold_build2_loc (loc, UNGT_EXPR,
8176 TREE_TYPE (TREE_TYPE (fndecl)),
8177 fold_build1_loc (loc, ABS_EXPR, type, arg),
8178 build_real (type, r));
8179 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8180 TREE_TYPE (TREE_TYPE (fndecl)),
8181 result);*/
8182 return result;
8184 case BUILT_IN_ISNORMAL:
8186 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8187 islessequal(fabs(x),DBL_MAX). */
8188 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8189 tree type = TREE_TYPE (arg);
8190 tree orig_arg, max_exp, min_exp;
8191 machine_mode orig_mode = mode;
8192 REAL_VALUE_TYPE rmax, rmin;
8193 char buf[128];
8195 orig_arg = arg = builtin_save_expr (arg);
8196 if (is_ibm_extended)
8198 /* Use double to test the normal range of IBM extended
8199 precision. Emin for IBM extended precision is
8200 different to emin for IEEE double, being 53 higher
8201 since the low double exponent is at least 53 lower
8202 than the high double exponent. */
8203 type = double_type_node;
8204 mode = DFmode;
8205 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8207 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8209 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8210 real_from_string (&rmax, buf);
8211 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8212 real_from_string (&rmin, buf);
8213 max_exp = build_real (type, rmax);
8214 min_exp = build_real (type, rmin);
8216 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8217 if (is_ibm_extended)
8219 /* Testing the high end of the range is done just using
8220 the high double, using the same test as isfinite().
8221 For the subnormal end of the range we first test the
8222 high double, then if its magnitude is equal to the
8223 limit of 0x1p-969, we test whether the low double is
8224 non-zero and opposite sign to the high double. */
8225 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8226 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8227 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8228 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8229 arg, min_exp);
8230 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8231 complex_double_type_node, orig_arg);
8232 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8233 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8234 tree zero = build_real (type, dconst0);
8235 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8236 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8237 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8238 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8239 fold_build3 (COND_EXPR,
8240 integer_type_node,
8241 hilt, logt, lolt));
8242 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8243 eq_min, ok_lo);
8244 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8245 gt_min, eq_min);
8247 else
8249 tree const isge_fn
8250 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8251 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8253 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8254 max_exp, min_exp);
8255 return result;
8257 default:
8258 break;
8261 return NULL_TREE;
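/* Illustrative summary of the generic expansions above (editor's note; the
   bounds come from get_max_float and the mode's emin, shown here for IEEE
   double):

     isinf (x)    -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> isgreaterequal (fabs (x), 0x1p-1022)
                     & islessequal (fabs (x), DBL_MAX)

   where 0x1p-1022 is the smallest normal double (emin - 1 == -1022).  */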
8264 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8265 ARG is the argument for the call. */
8267 static tree
8268 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8270 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8272 if (!validate_arg (arg, REAL_TYPE))
8273 return NULL_TREE;
8275 switch (builtin_index)
8277 case BUILT_IN_ISINF:
8278 if (!HONOR_INFINITIES (arg))
8279 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8281 return NULL_TREE;
8283 case BUILT_IN_ISINF_SIGN:
8285 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8286 /* In a boolean context, GCC will fold the inner COND_EXPR to
8287 1. So e.g. "if (isinf_sign(x))" would be folded to just
8288 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8289 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8290 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8291 tree tmp = NULL_TREE;
8293 arg = builtin_save_expr (arg);
8295 if (signbit_fn && isinf_fn)
8297 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8298 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8300 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8301 signbit_call, integer_zero_node);
8302 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8303 isinf_call, integer_zero_node);
8305 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8306 integer_minus_one_node, integer_one_node);
8307 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8308 isinf_call, tmp,
8309 integer_zero_node);
8312 return tmp;
8315 case BUILT_IN_ISFINITE:
8316 if (!HONOR_NANS (arg)
8317 && !HONOR_INFINITIES (arg))
8318 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8320 return NULL_TREE;
8322 case BUILT_IN_ISNAN:
8323 if (!HONOR_NANS (arg))
8324 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8327 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8328 if (is_ibm_extended)
8330 /* NaN and Inf are encoded in the high-order double value
8331 only. The low-order value is not significant. */
8332 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8335 arg = builtin_save_expr (arg);
8336 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8338 default:
8339 gcc_unreachable ();
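/* Illustrative examples (editor's sketch):
     isnan (x)      -> the argument compared with itself via UNORDERED_EXPR,
                       i.e. roughly __builtin_isunordered (x, x)
     isinf (x)      -> 0 when the format has no infinities
     isinf_sign (x) -> isinf (x) ? (signbit (x) ? -1 : 1) : 0  */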
8343 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8344 This builtin will generate code to return the appropriate floating
8345 point classification depending on the value of the floating point
8346 number passed in. The possible return values must be supplied as
8347 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8348 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8349 one floating point argument which is "type generic". */
8351 static tree
8352 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8354 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8355 arg, type, res, tmp;
8356 machine_mode mode;
8357 REAL_VALUE_TYPE r;
8358 char buf[128];
8360 /* Verify the required arguments in the original call. */
8361 if (nargs != 6
8362 || !validate_arg (args[0], INTEGER_TYPE)
8363 || !validate_arg (args[1], INTEGER_TYPE)
8364 || !validate_arg (args[2], INTEGER_TYPE)
8365 || !validate_arg (args[3], INTEGER_TYPE)
8366 || !validate_arg (args[4], INTEGER_TYPE)
8367 || !validate_arg (args[5], REAL_TYPE))
8368 return NULL_TREE;
8370 fp_nan = args[0];
8371 fp_infinite = args[1];
8372 fp_normal = args[2];
8373 fp_subnormal = args[3];
8374 fp_zero = args[4];
8375 arg = args[5];
8376 type = TREE_TYPE (arg);
8377 mode = TYPE_MODE (type);
8378 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8380 /* fpclassify(x) ->
8381 isnan(x) ? FP_NAN :
8382 (fabs(x) == Inf ? FP_INFINITE :
8383 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8384 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8386 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8387 build_real (type, dconst0));
8388 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8389 tmp, fp_zero, fp_subnormal);
8391 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8392 real_from_string (&r, buf);
8393 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8394 arg, build_real (type, r));
8395 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8397 if (HONOR_INFINITIES (mode))
8399 real_inf (&r);
8400 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8401 build_real (type, r));
8402 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8403 fp_infinite, res);
8406 if (HONOR_NANS (mode))
8408 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8409 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8412 return res;
8415 /* Fold a call to an unordered comparison function such as
8416 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8417 being called and ARG0 and ARG1 are the arguments for the call.
8418 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8419 the opposite of the desired result. UNORDERED_CODE is used
8420 for modes that can hold NaNs and ORDERED_CODE is used for
8421 the rest. */
8423 static tree
8424 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8425 enum tree_code unordered_code,
8426 enum tree_code ordered_code)
8428 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8429 enum tree_code code;
8430 tree type0, type1;
8431 enum tree_code code0, code1;
8432 tree cmp_type = NULL_TREE;
8434 type0 = TREE_TYPE (arg0);
8435 type1 = TREE_TYPE (arg1);
8437 code0 = TREE_CODE (type0);
8438 code1 = TREE_CODE (type1);
8440 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8441 /* Choose the wider of two real types. */
8442 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8443 ? type0 : type1;
8444 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8445 cmp_type = type0;
8446 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8447 cmp_type = type1;
8449 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8450 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8452 if (unordered_code == UNORDERED_EXPR)
8454 if (!HONOR_NANS (arg0))
8455 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8456 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8459 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8460 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8461 fold_build2_loc (loc, code, type, arg0, arg1));
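/* Worked example (illustrative only): when NaNs are honored,

     __builtin_isgreater (x, y)

   folds to

     !(x UNLE y)          i.e. !("unordered or x <= y")

   which is true only for ordered operands with x > y; if NaNs cannot occur
   in the operands' type, the plain !(x <= y) is used instead.  */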
8464 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8465 arithmetic if it can never overflow, or into internal functions that
8466 return both the result of the arithmetic and an overflow flag in
8467 a complex integer result, or into some other check for overflow.
8468 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8469 checking part of that. */
8471 static tree
8472 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8473 tree arg0, tree arg1, tree arg2)
8475 enum internal_fn ifn = IFN_LAST;
8476 /* The code of the expression corresponding to the type-generic
8477 built-in, or ERROR_MARK for the type-specific ones. */
8478 enum tree_code opcode = ERROR_MARK;
8479 bool ovf_only = false;
8481 switch (fcode)
8483 case BUILT_IN_ADD_OVERFLOW_P:
8484 ovf_only = true;
8485 /* FALLTHRU */
8486 case BUILT_IN_ADD_OVERFLOW:
8487 opcode = PLUS_EXPR;
8488 /* FALLTHRU */
8489 case BUILT_IN_SADD_OVERFLOW:
8490 case BUILT_IN_SADDL_OVERFLOW:
8491 case BUILT_IN_SADDLL_OVERFLOW:
8492 case BUILT_IN_UADD_OVERFLOW:
8493 case BUILT_IN_UADDL_OVERFLOW:
8494 case BUILT_IN_UADDLL_OVERFLOW:
8495 ifn = IFN_ADD_OVERFLOW;
8496 break;
8497 case BUILT_IN_SUB_OVERFLOW_P:
8498 ovf_only = true;
8499 /* FALLTHRU */
8500 case BUILT_IN_SUB_OVERFLOW:
8501 opcode = MINUS_EXPR;
8502 /* FALLTHRU */
8503 case BUILT_IN_SSUB_OVERFLOW:
8504 case BUILT_IN_SSUBL_OVERFLOW:
8505 case BUILT_IN_SSUBLL_OVERFLOW:
8506 case BUILT_IN_USUB_OVERFLOW:
8507 case BUILT_IN_USUBL_OVERFLOW:
8508 case BUILT_IN_USUBLL_OVERFLOW:
8509 ifn = IFN_SUB_OVERFLOW;
8510 break;
8511 case BUILT_IN_MUL_OVERFLOW_P:
8512 ovf_only = true;
8513 /* FALLTHRU */
8514 case BUILT_IN_MUL_OVERFLOW:
8515 opcode = MULT_EXPR;
8516 /* FALLTHRU */
8517 case BUILT_IN_SMUL_OVERFLOW:
8518 case BUILT_IN_SMULL_OVERFLOW:
8519 case BUILT_IN_SMULLL_OVERFLOW:
8520 case BUILT_IN_UMUL_OVERFLOW:
8521 case BUILT_IN_UMULL_OVERFLOW:
8522 case BUILT_IN_UMULLL_OVERFLOW:
8523 ifn = IFN_MUL_OVERFLOW;
8524 break;
8525 default:
8526 gcc_unreachable ();
8529 /* For the "generic" overloads, the first two arguments can have different
8530 types and the last argument determines the target type to use to check
8531 for overflow. The arguments of the other overloads all have the same
8532 type. */
8533 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8535 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8536 arguments are constant, attempt to fold the built-in call into a constant
8537 expression indicating whether or not it detected an overflow. */
8538 if (ovf_only
8539 && TREE_CODE (arg0) == INTEGER_CST
8540 && TREE_CODE (arg1) == INTEGER_CST)
8541 /* Perform the computation in the target type and check for overflow. */
8542 return omit_one_operand_loc (loc, boolean_type_node,
8543 arith_overflowed_p (opcode, type, arg0, arg1)
8544 ? boolean_true_node : boolean_false_node,
8545 arg2);
8547 tree ctype = build_complex_type (type);
8548 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8549 2, arg0, arg1);
8550 tree tgt = save_expr (call);
8551 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8552 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8553 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8555 if (ovf_only)
8556 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8558 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8559 tree store
8560 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8561 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
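/* Illustrative sketch (editor's note; names are hypothetical): for

     __builtin_add_overflow (a, b, &r)

   the folding builds, in GIMPLE-like pseudo code,

     tmp = .ADD_OVERFLOW (a, b);   // complex int: real = sum, imag = flag
     r = __real__ tmp;
     result = (bool) __imag__ tmp;

   while __builtin_add_overflow_p (INT_MAX, 1, (int) 0), having constant
   operands, folds directly to true via arith_overflowed_p.  */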
8564 /* Fold a call to __builtin_FILE to a constant string. */
8566 static inline tree
8567 fold_builtin_FILE (location_t loc)
8569 if (const char *fname = LOCATION_FILE (loc))
8570 return build_string_literal (strlen (fname) + 1, fname);
8572 return build_string_literal (1, "");
8575 /* Fold a call to __builtin_FUNCTION to a constant string. */
8577 static inline tree
8578 fold_builtin_FUNCTION ()
8580 if (current_function_decl)
8582 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8583 return build_string_literal (strlen (name) + 1, name);
8586 return build_string_literal (1, "");
8589 /* Fold a call to __builtin_LINE to an integer constant. */
8591 static inline tree
8592 fold_builtin_LINE (location_t loc, tree type)
8594 return build_int_cst (type, LOCATION_LINE (loc));
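/* Worked examples for the three folders above (illustrative only): a call to
   __builtin_LINE () appearing at line 42 of foo.c folds to the integer
   constant 42, __builtin_FILE () folds to the string "foo.c", and
   __builtin_FUNCTION () folds to the enclosing function's name, or to ""
   when there is no current function.  */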
8597 /* Fold a call to built-in function FNDECL with 0 arguments.
8598 This function returns NULL_TREE if no simplification was possible. */
8600 static tree
8601 fold_builtin_0 (location_t loc, tree fndecl)
8603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8604 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8605 switch (fcode)
8607 case BUILT_IN_FILE:
8608 return fold_builtin_FILE (loc);
8610 case BUILT_IN_FUNCTION:
8611 return fold_builtin_FUNCTION ();
8613 case BUILT_IN_LINE:
8614 return fold_builtin_LINE (loc, type);
8616 CASE_FLT_FN (BUILT_IN_INF):
8617 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8618 case BUILT_IN_INFD32:
8619 case BUILT_IN_INFD64:
8620 case BUILT_IN_INFD128:
8621 return fold_builtin_inf (loc, type, true);
8623 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8624 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8625 return fold_builtin_inf (loc, type, false);
8627 case BUILT_IN_CLASSIFY_TYPE:
8628 return fold_builtin_classify_type (NULL_TREE);
8630 default:
8631 break;
8633 return NULL_TREE;
8636 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8637 This function returns NULL_TREE if no simplification was possible. */
8639 static tree
8640 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8642 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8643 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8645 if (TREE_CODE (arg0) == ERROR_MARK)
8646 return NULL_TREE;
8648 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8649 return ret;
8651 switch (fcode)
8653 case BUILT_IN_CONSTANT_P:
8655 tree val = fold_builtin_constant_p (arg0);
8657 /* Gimplification will pull the CALL_EXPR for the builtin out of
8658 an if condition. When not optimizing, we'll not CSE it back.
8659 To avoid regressions such as link errors, return false now. */
8660 if (!val && !optimize)
8661 val = integer_zero_node;
8663 return val;
8666 case BUILT_IN_CLASSIFY_TYPE:
8667 return fold_builtin_classify_type (arg0);
8669 case BUILT_IN_STRLEN:
8670 return fold_builtin_strlen (loc, type, arg0);
8672 CASE_FLT_FN (BUILT_IN_FABS):
8673 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8674 case BUILT_IN_FABSD32:
8675 case BUILT_IN_FABSD64:
8676 case BUILT_IN_FABSD128:
8677 return fold_builtin_fabs (loc, arg0, type);
8679 case BUILT_IN_ABS:
8680 case BUILT_IN_LABS:
8681 case BUILT_IN_LLABS:
8682 case BUILT_IN_IMAXABS:
8683 return fold_builtin_abs (loc, arg0, type);
8685 CASE_FLT_FN (BUILT_IN_CONJ):
8686 if (validate_arg (arg0, COMPLEX_TYPE)
8687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8688 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8689 break;
8691 CASE_FLT_FN (BUILT_IN_CREAL):
8692 if (validate_arg (arg0, COMPLEX_TYPE)
8693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8694 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8695 break;
8697 CASE_FLT_FN (BUILT_IN_CIMAG):
8698 if (validate_arg (arg0, COMPLEX_TYPE)
8699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8700 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8701 break;
8703 CASE_FLT_FN (BUILT_IN_CARG):
8704 return fold_builtin_carg (loc, arg0, type);
8706 case BUILT_IN_ISASCII:
8707 return fold_builtin_isascii (loc, arg0);
8709 case BUILT_IN_TOASCII:
8710 return fold_builtin_toascii (loc, arg0);
8712 case BUILT_IN_ISDIGIT:
8713 return fold_builtin_isdigit (loc, arg0);
8715 CASE_FLT_FN (BUILT_IN_FINITE):
8716 case BUILT_IN_FINITED32:
8717 case BUILT_IN_FINITED64:
8718 case BUILT_IN_FINITED128:
8719 case BUILT_IN_ISFINITE:
8721 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8722 if (ret)
8723 return ret;
8724 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8727 CASE_FLT_FN (BUILT_IN_ISINF):
8728 case BUILT_IN_ISINFD32:
8729 case BUILT_IN_ISINFD64:
8730 case BUILT_IN_ISINFD128:
8732 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8733 if (ret)
8734 return ret;
8735 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8738 case BUILT_IN_ISNORMAL:
8739 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8741 case BUILT_IN_ISINF_SIGN:
8742 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8744 CASE_FLT_FN (BUILT_IN_ISNAN):
8745 case BUILT_IN_ISNAND32:
8746 case BUILT_IN_ISNAND64:
8747 case BUILT_IN_ISNAND128:
8748 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8750 case BUILT_IN_FREE:
8751 if (integer_zerop (arg0))
8752 return build_empty_stmt (loc);
8753 break;
8755 default:
8756 break;
8759 return NULL_TREE;
8763 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8764 This function returns NULL_TREE if no simplification was possible. */
8766 static tree
8767 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8769 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8772 if (TREE_CODE (arg0) == ERROR_MARK
8773 || TREE_CODE (arg1) == ERROR_MARK)
8774 return NULL_TREE;
8776 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8777 return ret;
8779 switch (fcode)
8781 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8782 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8783 if (validate_arg (arg0, REAL_TYPE)
8784 && validate_arg (arg1, POINTER_TYPE))
8785 return do_mpfr_lgamma_r (arg0, arg1, type);
8786 break;
8788 CASE_FLT_FN (BUILT_IN_FREXP):
8789 return fold_builtin_frexp (loc, arg0, arg1, type);
8791 CASE_FLT_FN (BUILT_IN_MODF):
8792 return fold_builtin_modf (loc, arg0, arg1, type);
8794 case BUILT_IN_STRSPN:
8795 return fold_builtin_strspn (loc, arg0, arg1);
8797 case BUILT_IN_STRCSPN:
8798 return fold_builtin_strcspn (loc, arg0, arg1);
8800 case BUILT_IN_STRPBRK:
8801 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8803 case BUILT_IN_EXPECT:
8804 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8806 case BUILT_IN_ISGREATER:
8807 return fold_builtin_unordered_cmp (loc, fndecl,
8808 arg0, arg1, UNLE_EXPR, LE_EXPR);
8809 case BUILT_IN_ISGREATEREQUAL:
8810 return fold_builtin_unordered_cmp (loc, fndecl,
8811 arg0, arg1, UNLT_EXPR, LT_EXPR);
8812 case BUILT_IN_ISLESS:
8813 return fold_builtin_unordered_cmp (loc, fndecl,
8814 arg0, arg1, UNGE_EXPR, GE_EXPR);
8815 case BUILT_IN_ISLESSEQUAL:
8816 return fold_builtin_unordered_cmp (loc, fndecl,
8817 arg0, arg1, UNGT_EXPR, GT_EXPR);
8818 case BUILT_IN_ISLESSGREATER:
8819 return fold_builtin_unordered_cmp (loc, fndecl,
8820 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8821 case BUILT_IN_ISUNORDERED:
8822 return fold_builtin_unordered_cmp (loc, fndecl,
8823 arg0, arg1, UNORDERED_EXPR,
8824 NOP_EXPR);
8826 /* We do the folding for va_start in the expander. */
8827 case BUILT_IN_VA_START:
8828 break;
8830 case BUILT_IN_OBJECT_SIZE:
8831 return fold_builtin_object_size (arg0, arg1);
8833 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8834 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8836 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8837 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8839 default:
8840 break;
8842 return NULL_TREE;
8845 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8846 and ARG2.
8847 This function returns NULL_TREE if no simplification was possible. */
8849 static tree
8850 fold_builtin_3 (location_t loc, tree fndecl,
8851 tree arg0, tree arg1, tree arg2)
8853 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8856 if (TREE_CODE (arg0) == ERROR_MARK
8857 || TREE_CODE (arg1) == ERROR_MARK
8858 || TREE_CODE (arg2) == ERROR_MARK)
8859 return NULL_TREE;
8861 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8862 arg0, arg1, arg2))
8863 return ret;
8865 switch (fcode)
8868 CASE_FLT_FN (BUILT_IN_SINCOS):
8869 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8871 CASE_FLT_FN (BUILT_IN_FMA):
8872 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8874 CASE_FLT_FN (BUILT_IN_REMQUO):
8875 if (validate_arg (arg0, REAL_TYPE)
8876 && validate_arg (arg1, REAL_TYPE)
8877 && validate_arg (arg2, POINTER_TYPE))
8878 return do_mpfr_remquo (arg0, arg1, arg2);
8879 break;
8881 case BUILT_IN_BCMP:
8882 case BUILT_IN_MEMCMP:
8883 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8885 case BUILT_IN_EXPECT:
8886 return fold_builtin_expect (loc, arg0, arg1, arg2);
8888 case BUILT_IN_ADD_OVERFLOW:
8889 case BUILT_IN_SUB_OVERFLOW:
8890 case BUILT_IN_MUL_OVERFLOW:
8891 case BUILT_IN_ADD_OVERFLOW_P:
8892 case BUILT_IN_SUB_OVERFLOW_P:
8893 case BUILT_IN_MUL_OVERFLOW_P:
8894 case BUILT_IN_SADD_OVERFLOW:
8895 case BUILT_IN_SADDL_OVERFLOW:
8896 case BUILT_IN_SADDLL_OVERFLOW:
8897 case BUILT_IN_SSUB_OVERFLOW:
8898 case BUILT_IN_SSUBL_OVERFLOW:
8899 case BUILT_IN_SSUBLL_OVERFLOW:
8900 case BUILT_IN_SMUL_OVERFLOW:
8901 case BUILT_IN_SMULL_OVERFLOW:
8902 case BUILT_IN_SMULLL_OVERFLOW:
8903 case BUILT_IN_UADD_OVERFLOW:
8904 case BUILT_IN_UADDL_OVERFLOW:
8905 case BUILT_IN_UADDLL_OVERFLOW:
8906 case BUILT_IN_USUB_OVERFLOW:
8907 case BUILT_IN_USUBL_OVERFLOW:
8908 case BUILT_IN_USUBLL_OVERFLOW:
8909 case BUILT_IN_UMUL_OVERFLOW:
8910 case BUILT_IN_UMULL_OVERFLOW:
8911 case BUILT_IN_UMULLL_OVERFLOW:
8912 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8914 default:
8915 break;
8917 return NULL_TREE;
8920 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8921 arguments. IGNORE is true if the result of the
8922 function call is ignored. This function returns NULL_TREE if no
8923 simplification was possible. */
8925 tree
8926 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8928 tree ret = NULL_TREE;
8930 switch (nargs)
8932 case 0:
8933 ret = fold_builtin_0 (loc, fndecl);
8934 break;
8935 case 1:
8936 ret = fold_builtin_1 (loc, fndecl, args[0]);
8937 break;
8938 case 2:
8939 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8940 break;
8941 case 3:
8942 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8943 break;
8944 default:
8945 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8946 break;
8948 if (ret)
8950 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8951 SET_EXPR_LOCATION (ret, loc);
8952 TREE_NO_WARNING (ret) = 1;
8953 return ret;
8955 return NULL_TREE;
8958 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8959 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8960 of arguments in ARGS to be omitted. OLDNARGS is the number of
8961 elements in ARGS. */
8963 static tree
8964 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8965 int skip, tree fndecl, int n, va_list newargs)
8967 int nargs = oldnargs - skip + n;
8968 tree *buffer;
8970 if (n > 0)
8972 int i, j;
8974 buffer = XALLOCAVEC (tree, nargs);
8975 for (i = 0; i < n; i++)
8976 buffer[i] = va_arg (newargs, tree);
8977 for (j = skip; j < oldnargs; j++, i++)
8978 buffer[i] = args[j];
8980 else
8981 buffer = args + skip;
8983 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8986 /* Return true if FNDECL shouldn't be folded right now.
8987 If a built-in function has an inline attribute always_inline
8988 wrapper, defer folding it after always_inline functions have
8989 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8990 might not be performed. */
8992 bool
8993 avoid_folding_inline_builtin (tree fndecl)
8995 return (DECL_DECLARED_INLINE_P (fndecl)
8996 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8997 && cfun
8998 && !cfun->always_inline_functions_inlined
8999 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
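/* As an illustration (a sketch of a glibc-style _FORTIFY_SOURCE wrapper,
   not taken from any particular header):

     extern __inline __attribute__ ((always_inline, gnu_inline)) char *
     strcpy (char *d, const char *s)
     {
       return __builtin___strcpy_chk (d, s, __builtin_object_size (d, 1));
     }

   Folding a strcpy call before such a wrapper has been inlined would
   bypass the object-size checking that the wrapper adds, which is why
   folding is deferred here.  */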
9002 /* A wrapper function for builtin folding that prevents warnings for
9003 "statement without effect" and the like, caused by removing the
9004 call node earlier than the warning is generated. */
9006 tree
9007 fold_call_expr (location_t loc, tree exp, bool ignore)
9009 tree ret = NULL_TREE;
9010 tree fndecl = get_callee_fndecl (exp);
9011 if (fndecl
9012 && TREE_CODE (fndecl) == FUNCTION_DECL
9013 && DECL_BUILT_IN (fndecl)
9014 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9015 yet. Defer folding until we see all the arguments
9016 (after inlining). */
9017 && !CALL_EXPR_VA_ARG_PACK (exp))
9019 int nargs = call_expr_nargs (exp);
9021 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9022 instead last argument is __builtin_va_arg_pack (). Defer folding
9023 even in that case, until arguments are finalized. */
9024 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9026 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9027 if (fndecl2
9028 && TREE_CODE (fndecl2) == FUNCTION_DECL
9029 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9030 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9031 return NULL_TREE;
9034 if (avoid_folding_inline_builtin (fndecl))
9035 return NULL_TREE;
9037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9038 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9039 CALL_EXPR_ARGP (exp), ignore);
9040 else
9042 tree *args = CALL_EXPR_ARGP (exp);
9043 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9044 if (ret)
9045 return ret;
9048 return NULL_TREE;
9051 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9052 N arguments are passed in the array ARGARRAY. Return a folded
9053 expression or NULL_TREE if no simplification was possible. */
9055 tree
9056 fold_builtin_call_array (location_t loc, tree,
9057 tree fn,
9058 int n,
9059 tree *argarray)
9061 if (TREE_CODE (fn) != ADDR_EXPR)
9062 return NULL_TREE;
9064 tree fndecl = TREE_OPERAND (fn, 0);
9065 if (TREE_CODE (fndecl) == FUNCTION_DECL
9066 && DECL_BUILT_IN (fndecl))
9068 /* If last argument is __builtin_va_arg_pack (), arguments to this
9069 function are not finalized yet. Defer folding until they are. */
9070 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9072 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9073 if (fndecl2
9074 && TREE_CODE (fndecl2) == FUNCTION_DECL
9075 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9076 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9077 return NULL_TREE;
9079 if (avoid_folding_inline_builtin (fndecl))
9080 return NULL_TREE;
9081 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9082 return targetm.fold_builtin (fndecl, n, argarray, false);
9083 else
9084 return fold_builtin_n (loc, fndecl, argarray, n, false);
9087 return NULL_TREE;
9090 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9091 along with N new arguments specified as the "..." parameters. SKIP
9092 is the number of arguments in EXP to be omitted. This function is used
9093 to do varargs-to-varargs transformations. */
9095 static tree
9096 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9098 va_list ap;
9099 tree t;
9101 va_start (ap, n);
9102 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9103 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9104 va_end (ap);
9106 return t;
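/* As an illustration (new_fndecl and x are hypothetical): if EXP is the
   call f (a, b, c, d), then

     rewrite_call_expr (loc, exp, /*skip=*/2, new_fndecl, 1, x)

   builds the call new_fndecl (x, c, d): the first two arguments of EXP
   are dropped and one new leading argument is supplied.  */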
9109 /* Validate a single argument ARG against a tree code CODE representing
9110 a type. Return true when argument is valid. */
9112 static bool
9113 validate_arg (const_tree arg, enum tree_code code)
9115 if (!arg)
9116 return false;
9117 else if (code == POINTER_TYPE)
9118 return POINTER_TYPE_P (TREE_TYPE (arg));
9119 else if (code == INTEGER_TYPE)
9120 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9121 return code == TREE_CODE (TREE_TYPE (arg));
9124 /* This function validates the types of a function call argument list
9125 against a specified list of tree_codes. If the last specifier is a 0,
9126    against a specified list of tree_codes. If the last specifier is a 0,
9126    that represents an ellipsis, otherwise the last specifier must be a
9127 VOID_TYPE.
9129 This is the GIMPLE version of validate_arglist. Eventually we want to
9130 completely convert builtins.c to work from GIMPLEs and the tree based
9131 validate_arglist will then be removed. */
9133 bool
9134 validate_gimple_arglist (const gcall *call, ...)
9136 enum tree_code code;
9137   bool res = false;
9138 va_list ap;
9139 const_tree arg;
9140 size_t i;
9142 va_start (ap, call);
9143 i = 0;
9147 code = (enum tree_code) va_arg (ap, int);
9148 switch (code)
9150 case 0:
9151 	/* This signifies an ellipsis; any further arguments are all ok. */
9152 res = true;
9153 goto end;
9154 case VOID_TYPE:
9155 /* This signifies an endlink, if no arguments remain, return
9156 true, otherwise return false. */
9157 res = (i == gimple_call_num_args (call));
9158 goto end;
9159 default:
9160 /* If no parameters remain or the parameter's code does not
9161 match the specified code, return false. Otherwise continue
9162 checking any remaining arguments. */
9163 arg = gimple_call_arg (call, i++);
9164 if (!validate_arg (arg, code))
9165 goto end;
9166 break;
9169 while (1);
9171 /* We need gotos here since we can only have one VA_CLOSE in a
9172 function. */
9173 end: ;
9174 va_end (ap);
9176 return res;
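/* For example, validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
   VOID_TYPE) accepts exactly two arguments, a pointer followed by an
   integer, whereas validate_gimple_arglist (call, POINTER_TYPE, 0)
   accepts a pointer followed by any number of further arguments.  */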
9179 /* Default target-specific builtin expander that does nothing. */
9181 rtx
9182 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9183 rtx target ATTRIBUTE_UNUSED,
9184 rtx subtarget ATTRIBUTE_UNUSED,
9185 machine_mode mode ATTRIBUTE_UNUSED,
9186 int ignore ATTRIBUTE_UNUSED)
9188 return NULL_RTX;
9191 /* Returns true if EXP represents data that would potentially reside
9192 in a readonly section. */
9194 bool
9195 readonly_data_expr (tree exp)
9197 STRIP_NOPS (exp);
9199 if (TREE_CODE (exp) != ADDR_EXPR)
9200 return false;
9202 exp = get_base_address (TREE_OPERAND (exp, 0));
9203 if (!exp)
9204 return false;
9206 /* Make sure we call decl_readonly_section only for trees it
9207 can handle (since it returns true for everything it doesn't
9208 understand). */
9209 if (TREE_CODE (exp) == STRING_CST
9210 || TREE_CODE (exp) == CONSTRUCTOR
9211 || (VAR_P (exp) && TREE_STATIC (exp)))
9212 return decl_readonly_section (exp, 0);
9213 else
9214 return false;
9217 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9218 to the call, and TYPE is its return type.
9220 Return NULL_TREE if no simplification was possible, otherwise return the
9221 simplified form of the call as a tree.
9223 The simplified form may be a constant or other expression which
9224 computes the same value, but in a more efficient manner (including
9225 calls to other builtin functions).
9227 The call may contain arguments which need to be evaluated, but
9228 which are not useful to determine the result of the call. In
9229 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9230 COMPOUND_EXPR will be an argument which must be evaluated.
9231 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9232 COMPOUND_EXPR in the chain will contain the tree for the simplified
9233 form of the builtin function call. */
9235 static tree
9236 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9238 if (!validate_arg (s1, POINTER_TYPE)
9239 || !validate_arg (s2, POINTER_TYPE))
9240 return NULL_TREE;
9241 else
9243 tree fn;
9244 const char *p1, *p2;
9246 p2 = c_getstr (s2);
9247 if (p2 == NULL)
9248 return NULL_TREE;
9250 p1 = c_getstr (s1);
9251 if (p1 != NULL)
9253 const char *r = strpbrk (p1, p2);
9254 tree tem;
9256 if (r == NULL)
9257 return build_int_cst (TREE_TYPE (s1), 0);
9259 /* Return an offset into the constant string argument. */
9260 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9261 return fold_convert_loc (loc, type, tem);
9264 if (p2[0] == '\0')
9265 /* strpbrk(x, "") == NULL.
9266 Evaluate and ignore s1 in case it had side-effects. */
9267 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9269 if (p2[1] != '\0')
9270 return NULL_TREE; /* Really call strpbrk. */
9272 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9273 if (!fn)
9274 return NULL_TREE;
9276 /* New argument list transforming strpbrk(s1, s2) to
9277 strchr(s1, s2[0]). */
9278 return build_call_expr_loc (loc, fn, 2, s1,
9279 build_int_cst (integer_type_node, p2[0]));
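/* For example, strpbrk (s, "") folds to a null pointer (s is still
   evaluated for side effects), strpbrk (s, "/") folds to strchr (s, '/'),
   and with both arguments constant, strpbrk ("hello", "lo") folds to
   "hello" + 2.  */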
9283 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9284 to the call.
9286 Return NULL_TREE if no simplification was possible, otherwise return the
9287 simplified form of the call as a tree.
9289 The simplified form may be a constant or other expression which
9290 computes the same value, but in a more efficient manner (including
9291 calls to other builtin functions).
9293 The call may contain arguments which need to be evaluated, but
9294 which are not useful to determine the result of the call. In
9295 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9296 COMPOUND_EXPR will be an argument which must be evaluated.
9297 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9298 COMPOUND_EXPR in the chain will contain the tree for the simplified
9299 form of the builtin function call. */
9301 static tree
9302 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9304 if (!validate_arg (s1, POINTER_TYPE)
9305 || !validate_arg (s2, POINTER_TYPE))
9306 return NULL_TREE;
9307 else
9309 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9311 /* If either argument is "", return NULL_TREE. */
9312 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9313 /* Evaluate and ignore both arguments in case either one has
9314 side-effects. */
9315 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9316 s1, s2);
9317 return NULL_TREE;
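/* For example, strspn (s, "") and strspn ("", s) both fold to
   (size_t) 0, with the other argument still evaluated for its side
   effects.  */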
9321 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9322 to the call.
9324 Return NULL_TREE if no simplification was possible, otherwise return the
9325 simplified form of the call as a tree.
9327 The simplified form may be a constant or other expression which
9328 computes the same value, but in a more efficient manner (including
9329 calls to other builtin functions).
9331 The call may contain arguments which need to be evaluated, but
9332 which are not useful to determine the result of the call. In
9333 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9334 COMPOUND_EXPR will be an argument which must be evaluated.
9335 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9336 COMPOUND_EXPR in the chain will contain the tree for the simplified
9337 form of the builtin function call. */
9339 static tree
9340 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9342 if (!validate_arg (s1, POINTER_TYPE)
9343 || !validate_arg (s2, POINTER_TYPE))
9344 return NULL_TREE;
9345 else
9347 /* If the first argument is "", return NULL_TREE. */
9348 const char *p1 = c_getstr (s1);
9349 if (p1 && *p1 == '\0')
9351 /* Evaluate and ignore argument s2 in case it has
9352 side-effects. */
9353 return omit_one_operand_loc (loc, size_type_node,
9354 size_zero_node, s2);
9357 /* If the second argument is "", return __builtin_strlen(s1). */
9358 const char *p2 = c_getstr (s2);
9359 if (p2 && *p2 == '\0')
9361 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9363 /* If the replacement _DECL isn't initialized, don't do the
9364 transformation. */
9365 if (!fn)
9366 return NULL_TREE;
9368 return build_call_expr_loc (loc, fn, 1, s1);
9370 return NULL_TREE;
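/* For example, strcspn ("", s) folds to (size_t) 0 (s is still evaluated
   for side effects) and strcspn (s, "") folds to strlen (s).  */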
9374 /* Fold the next_arg or va_start call EXP. Return true if an error was
9375    produced, false otherwise. This is done so that we don't output the
9376    error or warning twice or three times. */
9378 bool
9379 fold_builtin_next_arg (tree exp, bool va_start_p)
9381 tree fntype = TREE_TYPE (current_function_decl);
9382 int nargs = call_expr_nargs (exp);
9383 tree arg;
9384   /* There is a good chance the current input_location points inside the
9385      definition of the va_start macro (perhaps on the token for the
9386      builtin) in a system header, so warnings will not be emitted.
9387 Use the location in real source code. */
9388 source_location current_location =
9389 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9390 NULL);
9392 if (!stdarg_p (fntype))
9394 error ("%<va_start%> used in function with fixed args");
9395 return true;
9398 if (va_start_p)
9400 if (va_start_p && (nargs != 2))
9402 error ("wrong number of arguments to function %<va_start%>");
9403 return true;
9405 arg = CALL_EXPR_ARG (exp, 1);
9407 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9408 when we checked the arguments and if needed issued a warning. */
9409 else
9411 if (nargs == 0)
9413 /* Evidently an out of date version of <stdarg.h>; can't validate
9414 va_start's second argument, but can still work as intended. */
9415 warning_at (current_location,
9416 OPT_Wvarargs,
9417 "%<__builtin_next_arg%> called without an argument");
9418 return true;
9420 else if (nargs > 1)
9422 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9423 return true;
9425 arg = CALL_EXPR_ARG (exp, 0);
9428 if (TREE_CODE (arg) == SSA_NAME)
9429 arg = SSA_NAME_VAR (arg);
9431 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9432 or __builtin_next_arg (0) the first time we see it, after checking
9433 the arguments and if needed issuing a warning. */
9434 if (!integer_zerop (arg))
9436 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9438 /* Strip off all nops for the sake of the comparison. This
9439 is not quite the same as STRIP_NOPS. It does more.
9440 We must also strip off INDIRECT_EXPR for C++ reference
9441 parameters. */
9442 while (CONVERT_EXPR_P (arg)
9443 || TREE_CODE (arg) == INDIRECT_REF)
9444 arg = TREE_OPERAND (arg, 0);
9445 if (arg != last_parm)
9447 	  /* FIXME: Sometimes with the tree optimizers we can get
9448 	     something other than the last argument even though the user
9449 	     used the last argument. We just warn and set the arg to be
9450 	     the last argument so that we will get wrong-code because of
9451 	     it. */
9452 warning_at (current_location,
9453 OPT_Wvarargs,
9454 "second parameter of %<va_start%> not last named argument");
9457 /* Undefined by C99 7.15.1.4p4 (va_start):
9458 "If the parameter parmN is declared with the register storage
9459 class, with a function or array type, or with a type that is
9460 not compatible with the type that results after application of
9461 the default argument promotions, the behavior is undefined."
9463 else if (DECL_REGISTER (arg))
9465 warning_at (current_location,
9466 OPT_Wvarargs,
9467 "undefined behavior when second parameter of "
9468 "%<va_start%> is declared with %<register%> storage");
9471 /* We want to verify the second parameter just once before the tree
9472 optimizers are run and then avoid keeping it in the tree,
9473 as otherwise we could warn even for correct code like:
9474 void foo (int i, ...)
9475 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9476 if (va_start_p)
9477 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9478 else
9479 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9481 return false;
9485 /* Expand a call EXP to __builtin_object_size. */
9487 static rtx
9488 expand_builtin_object_size (tree exp)
9490 tree ost;
9491 int object_size_type;
9492 tree fndecl = get_callee_fndecl (exp);
9494 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9496 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9497 exp, fndecl);
9498 expand_builtin_trap ();
9499 return const0_rtx;
9502 ost = CALL_EXPR_ARG (exp, 1);
9503 STRIP_NOPS (ost);
9505 if (TREE_CODE (ost) != INTEGER_CST
9506 || tree_int_cst_sgn (ost) < 0
9507 || compare_tree_int (ost, 3) > 0)
9509 error ("%Klast argument of %D is not integer constant between 0 and 3",
9510 exp, fndecl);
9511 expand_builtin_trap ();
9512 return const0_rtx;
9515 object_size_type = tree_to_shwi (ost);
9517 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9520 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9521 FCODE is the BUILT_IN_* to use.
9522 Return NULL_RTX if we failed; the caller should emit a normal call,
9523 otherwise try to get the result in TARGET, if convenient (and in
9524 mode MODE if that's convenient). */
9526 static rtx
9527 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9528 enum built_in_function fcode)
9530 tree dest, src, len, size;
9532 if (!validate_arglist (exp,
9533 POINTER_TYPE,
9534 fcode == BUILT_IN_MEMSET_CHK
9535 ? INTEGER_TYPE : POINTER_TYPE,
9536 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9537 return NULL_RTX;
9539 dest = CALL_EXPR_ARG (exp, 0);
9540 src = CALL_EXPR_ARG (exp, 1);
9541 len = CALL_EXPR_ARG (exp, 2);
9542 size = CALL_EXPR_ARG (exp, 3);
9544 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9545 exp, len, /*maxlen=*/NULL_TREE,
9546 /*str=*/NULL_TREE, size);
9548 if (!tree_fits_uhwi_p (size))
9549 return NULL_RTX;
9551 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9553 /* Avoid transforming the checking call to an ordinary one when
9554 an overflow has been detected or when the call couldn't be
9555 validated because the size is not constant. */
9556 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9557 return NULL_RTX;
9559 tree fn = NULL_TREE;
9560 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9561 mem{cpy,pcpy,move,set} is available. */
9562 switch (fcode)
9564 case BUILT_IN_MEMCPY_CHK:
9565 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9566 break;
9567 case BUILT_IN_MEMPCPY_CHK:
9568 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9569 break;
9570 case BUILT_IN_MEMMOVE_CHK:
9571 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9572 break;
9573 case BUILT_IN_MEMSET_CHK:
9574 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9575 break;
9576 default:
9577 break;
9580 if (! fn)
9581 return NULL_RTX;
9583 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9584 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9585 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9586 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9588 else if (fcode == BUILT_IN_MEMSET_CHK)
9589 return NULL_RTX;
9590 else
9592 unsigned int dest_align = get_pointer_alignment (dest);
9594 /* If DEST is not a pointer type, call the normal function. */
9595 if (dest_align == 0)
9596 return NULL_RTX;
9598 /* If SRC and DEST are the same (and not volatile), do nothing. */
9599 if (operand_equal_p (src, dest, 0))
9601 tree expr;
9603 if (fcode != BUILT_IN_MEMPCPY_CHK)
9605 /* Evaluate and ignore LEN in case it has side-effects. */
9606 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9607 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9610 expr = fold_build_pointer_plus (dest, len);
9611 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9614 /* __memmove_chk special case. */
9615 if (fcode == BUILT_IN_MEMMOVE_CHK)
9617 unsigned int src_align = get_pointer_alignment (src);
9619 if (src_align == 0)
9620 return NULL_RTX;
9622 /* If src is categorized for a readonly section we can use
9623 normal __memcpy_chk. */
9624 if (readonly_data_expr (src))
9626 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9627 if (!fn)
9628 return NULL_RTX;
9629 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9630 dest, src, len, size);
9631 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9632 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9633 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9636 return NULL_RTX;
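/* For example, __builtin___memcpy_chk (d, s, 16, 32), where the constant
   length fits in the known object size, expands like memcpy (d, s, 16);
   __builtin___memcpy_chk (d, s, n, (size_t) -1), where the object size is
   unknown, falls back to plain memcpy as well.  */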
9640 /* Emit warning if a buffer overflow is detected at compile time. */
9642 static void
9643 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9645 /* The source string. */
9646 tree srcstr = NULL_TREE;
9647 /* The size of the destination object. */
9648 tree objsize = NULL_TREE;
9649 /* The string that is being concatenated with (as in __strcat_chk)
9650 or null if it isn't. */
9651 tree catstr = NULL_TREE;
9652 /* The maximum length of the source sequence in a bounded operation
9653 (such as __strncat_chk) or null if the operation isn't bounded
9654 (such as __strcat_chk). */
9655 tree maxlen = NULL_TREE;
9657 switch (fcode)
9659 case BUILT_IN_STRCPY_CHK:
9660 case BUILT_IN_STPCPY_CHK:
9661 srcstr = CALL_EXPR_ARG (exp, 1);
9662 objsize = CALL_EXPR_ARG (exp, 2);
9663 break;
9665 case BUILT_IN_STRCAT_CHK:
9666 /* For __strcat_chk the warning will be emitted only if overflowing
9667 by at least strlen (dest) + 1 bytes. */
9668 catstr = CALL_EXPR_ARG (exp, 0);
9669 srcstr = CALL_EXPR_ARG (exp, 1);
9670 objsize = CALL_EXPR_ARG (exp, 2);
9671 break;
9673 case BUILT_IN_STRNCAT_CHK:
9674 catstr = CALL_EXPR_ARG (exp, 0);
9675 srcstr = CALL_EXPR_ARG (exp, 1);
9676 maxlen = CALL_EXPR_ARG (exp, 2);
9677 objsize = CALL_EXPR_ARG (exp, 3);
9678 break;
9680 case BUILT_IN_STRNCPY_CHK:
9681 case BUILT_IN_STPNCPY_CHK:
9682 srcstr = CALL_EXPR_ARG (exp, 1);
9683 maxlen = CALL_EXPR_ARG (exp, 2);
9684 objsize = CALL_EXPR_ARG (exp, 3);
9685 break;
9687 case BUILT_IN_SNPRINTF_CHK:
9688 case BUILT_IN_VSNPRINTF_CHK:
9689 maxlen = CALL_EXPR_ARG (exp, 1);
9690 objsize = CALL_EXPR_ARG (exp, 3);
9691 break;
9692 default:
9693 gcc_unreachable ();
9696 if (catstr && maxlen)
9698 /* Check __strncat_chk. There is no way to determine the length
9699 of the string to which the source string is being appended so
9700 just warn when the length of the source string is not known. */
9701 if (!check_strncat_sizes (exp, objsize))
9702 return;
9705 check_sizes (OPT_Wstringop_overflow_, exp,
9706 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9709 /* Emit warning if a buffer overflow is detected at compile time
9710 in __sprintf_chk/__vsprintf_chk calls. */
9712 static void
9713 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9715 tree size, len, fmt;
9716 const char *fmt_str;
9717 int nargs = call_expr_nargs (exp);
9719 /* Verify the required arguments in the original call. */
9721 if (nargs < 4)
9722 return;
9723 size = CALL_EXPR_ARG (exp, 2);
9724 fmt = CALL_EXPR_ARG (exp, 3);
9726 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9727 return;
9729 /* Check whether the format is a literal string constant. */
9730 fmt_str = c_getstr (fmt);
9731 if (fmt_str == NULL)
9732 return;
9734 if (!init_target_chars ())
9735 return;
9737 /* If the format doesn't contain % args or %%, we know its size. */
9738 if (strchr (fmt_str, target_percent) == 0)
9739 len = build_int_cstu (size_type_node, strlen (fmt_str));
9740 /* If the format is "%s" and first ... argument is a string literal,
9741 we know it too. */
9742 else if (fcode == BUILT_IN_SPRINTF_CHK
9743 && strcmp (fmt_str, target_percent_s) == 0)
9745 tree arg;
9747 if (nargs < 5)
9748 return;
9749 arg = CALL_EXPR_ARG (exp, 4);
9750 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9751 return;
9753 len = c_strlen (arg, 1);
9754 if (!len || ! tree_fits_uhwi_p (len))
9755 return;
9757 else
9758 return;
9760 /* Add one for the terminating nul. */
9761 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9762 check_sizes (OPT_Wstringop_overflow_,
9763 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
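/* For example, __builtin___sprintf_chk (buf, 1, 4, "abcdef") is known to
   write seven bytes (six characters plus the terminating nul) into an
   object of size four, so a -Wstringop-overflow warning is emitted.  */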
9766 /* Emit warning if free is called with the address of a variable. */
9768 static void
9769 maybe_emit_free_warning (tree exp)
9771 tree arg = CALL_EXPR_ARG (exp, 0);
9773 STRIP_NOPS (arg);
9774 if (TREE_CODE (arg) != ADDR_EXPR)
9775 return;
9777 arg = get_base_address (TREE_OPERAND (arg, 0));
9778 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9779 return;
9781 if (SSA_VAR_P (arg))
9782 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9783 "%Kattempt to free a non-heap object %qD", exp, arg);
9784 else
9785 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9786 "%Kattempt to free a non-heap object", exp);
9789 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9790 if possible. */
9792 static tree
9793 fold_builtin_object_size (tree ptr, tree ost)
9795 unsigned HOST_WIDE_INT bytes;
9796 int object_size_type;
9798 if (!validate_arg (ptr, POINTER_TYPE)
9799 || !validate_arg (ost, INTEGER_TYPE))
9800 return NULL_TREE;
9802 STRIP_NOPS (ost);
9804 if (TREE_CODE (ost) != INTEGER_CST
9805 || tree_int_cst_sgn (ost) < 0
9806 || compare_tree_int (ost, 3) > 0)
9807 return NULL_TREE;
9809 object_size_type = tree_to_shwi (ost);
9811 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9812 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9813 and (size_t) 0 for types 2 and 3. */
9814 if (TREE_SIDE_EFFECTS (ptr))
9815 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9817 if (TREE_CODE (ptr) == ADDR_EXPR)
9819 compute_builtin_object_size (ptr, object_size_type, &bytes);
9820 if (wi::fits_to_tree_p (bytes, size_type_node))
9821 return build_int_cstu (size_type_node, bytes);
9823 else if (TREE_CODE (ptr) == SSA_NAME)
9825 /* If object size is not known yet, delay folding until
9826 later. Maybe subsequent passes will help determining
9827 it. */
9828 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9829 && wi::fits_to_tree_p (bytes, size_type_node))
9830 return build_int_cstu (size_type_node, bytes);
9833 return NULL_TREE;
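/* For example, given "char buf[64];", __builtin_object_size (&buf[16], 0)
   folds to 48, the number of bytes from the pointer to the end of the
   object. If the size cannot be determined even by later passes, the
   builtin eventually evaluates to (size_t) -1 for types 0 and 1 and to 0
   for types 2 and 3 (see expand_builtin_object_size above).  */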
9836 /* Builtins with folding operations that operate on "..." arguments
9837 need special handling; we need to store the arguments in a convenient
9838 data structure before attempting any folding. Fortunately there are
9839 only a few builtins that fall into this category. FNDECL is the
9840 function, EXP is the CALL_EXPR for the call. */
9842 static tree
9843 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9845 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9846 tree ret = NULL_TREE;
9848 switch (fcode)
9850 case BUILT_IN_FPCLASSIFY:
9851 ret = fold_builtin_fpclassify (loc, args, nargs);
9852 break;
9854 default:
9855 break;
9857 if (ret)
9859 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9860 SET_EXPR_LOCATION (ret, loc);
9861 TREE_NO_WARNING (ret) = 1;
9862 return ret;
9864 return NULL_TREE;
9867 /* Initialize format string characters in the target charset. */
9869 bool
9870 init_target_chars (void)
9872 static bool init;
9873 if (!init)
9875 target_newline = lang_hooks.to_target_charset ('\n');
9876 target_percent = lang_hooks.to_target_charset ('%');
9877 target_c = lang_hooks.to_target_charset ('c');
9878 target_s = lang_hooks.to_target_charset ('s');
9879 if (target_newline == 0 || target_percent == 0 || target_c == 0
9880 || target_s == 0)
9881 return false;
9883 target_percent_c[0] = target_percent;
9884 target_percent_c[1] = target_c;
9885 target_percent_c[2] = '\0';
9887 target_percent_s[0] = target_percent;
9888 target_percent_s[1] = target_s;
9889 target_percent_s[2] = '\0';
9891 target_percent_s_newline[0] = target_percent;
9892 target_percent_s_newline[1] = target_s;
9893 target_percent_s_newline[2] = target_newline;
9894 target_percent_s_newline[3] = '\0';
9896 init = true;
9898 return true;
9901 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9902 and no overflow/underflow occurred. INEXACT is true if M was not
9903 exactly calculated. TYPE is the tree type for the result. This
9904 function assumes that you cleared the MPFR flags and then
9905 calculated M to see if anything subsequently set a flag prior to
9906 entering this function. Return NULL_TREE if any checks fail. */
9908 static tree
9909 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9911 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9912 overflow/underflow occurred. If -frounding-math, proceed iff the
9913 result of calling FUNC was exact. */
9914 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9915 && (!flag_rounding_math || !inexact))
9917 REAL_VALUE_TYPE rr;
9919 real_from_mpfr (&rr, m, type, GMP_RNDN);
9920 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9921 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9922 	 but the mpfr_t is not, then we underflowed in the
9923 conversion. */
9924 if (real_isfinite (&rr)
9925 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9927 REAL_VALUE_TYPE rmode;
9929 real_convert (&rmode, TYPE_MODE (type), &rr);
9930 /* Proceed iff the specified mode can hold the value. */
9931 if (real_identical (&rmode, &rr))
9932 return build_real (type, rmode);
9935 return NULL_TREE;
9938 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9939 number and no overflow/underflow occurred. INEXACT is true if M
9940 was not exactly calculated. TYPE is the tree type for the result.
9941 This function assumes that you cleared the MPFR flags and then
9942 calculated M to see if anything subsequently set a flag prior to
9943 entering this function. Return NULL_TREE if any checks fail, if
9944 FORCE_CONVERT is true, then bypass the checks. */
9946 static tree
9947 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9949 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9950 overflow/underflow occurred. If -frounding-math, proceed iff the
9951 result of calling FUNC was exact. */
9952 if (force_convert
9953 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9954 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9955 && (!flag_rounding_math || !inexact)))
9957 REAL_VALUE_TYPE re, im;
9959 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9960 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9961 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9962 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9963 	 but the mpfr_t is not, then we underflowed in the
9964 conversion. */
9965 if (force_convert
9966 || (real_isfinite (&re) && real_isfinite (&im)
9967 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9968 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9970 REAL_VALUE_TYPE re_mode, im_mode;
9972 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9973 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9974 /* Proceed iff the specified mode can hold the value. */
9975 if (force_convert
9976 || (real_identical (&re_mode, &re)
9977 && real_identical (&im_mode, &im)))
9978 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9979 build_real (TREE_TYPE (type), im_mode));
9982 return NULL_TREE;
9985 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9986 the pointer *(ARG_QUO) and return the result. The type is taken
9987 from the type of ARG0 and is used for setting the precision of the
9988 calculation and results. */
9990 static tree
9991 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9993 tree const type = TREE_TYPE (arg0);
9994 tree result = NULL_TREE;
9996 STRIP_NOPS (arg0);
9997 STRIP_NOPS (arg1);
9999 /* To proceed, MPFR must exactly represent the target floating point
10000 format, which only happens when the target base equals two. */
10001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10002 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10003 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10005 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10006 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10008 if (real_isfinite (ra0) && real_isfinite (ra1))
10010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10011 const int prec = fmt->p;
10012 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10013 tree result_rem;
10014 long integer_quo;
10015 mpfr_t m0, m1;
10017 mpfr_inits2 (prec, m0, m1, NULL);
10018 mpfr_from_real (m0, ra0, GMP_RNDN);
10019 mpfr_from_real (m1, ra1, GMP_RNDN);
10020 mpfr_clear_flags ();
10021 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10022 /* Remquo is independent of the rounding mode, so pass
10023 inexact=0 to do_mpfr_ckconv(). */
10024 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10025 mpfr_clears (m0, m1, NULL);
10026 if (result_rem)
10028 /* MPFR calculates quo in the host's long so it may
10029 return more bits in quo than the target int can hold
10030 if sizeof(host long) > sizeof(target int). This can
10031 happen even for native compilers in LP64 mode. In
10032 these cases, modulo the quo value with the largest
10033 number that the target int can hold while leaving one
10034 bit for the sign. */
10035 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10036 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10038 /* Dereference the quo pointer argument. */
10039 arg_quo = build_fold_indirect_ref (arg_quo);
10040 /* Proceed iff a valid pointer type was passed in. */
10041 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10043 /* Set the value. */
10044 tree result_quo
10045 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10046 build_int_cst (TREE_TYPE (arg_quo),
10047 integer_quo));
10048 TREE_SIDE_EFFECTS (result_quo) = 1;
10049 /* Combine the quo assignment with the rem. */
10050 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10051 result_quo, result_rem));
10056 return result;
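/* As an illustration with constant arguments: remquo (5.0, 3.0, &quo)
   folds to a COMPOUND_EXPR that stores 2 in *quo and yields the remainder
   -1.0, since 5.0 / 3.0 rounds to 2 and 5.0 - 2 * 3.0 == -1.0.  */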
10059 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10060 resulting value as a tree with type TYPE. The mpfr precision is
10061 set to the precision of TYPE. We assume that this mpfr function
10062 returns zero if the result could be calculated exactly within the
10063 requested precision. In addition, the integer pointer represented
10064 by ARG_SG will be dereferenced and set to the appropriate signgam
10065 (-1,1) value. */
10067 static tree
10068 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10070 tree result = NULL_TREE;
10072 STRIP_NOPS (arg);
10074 /* To proceed, MPFR must exactly represent the target floating point
10075 format, which only happens when the target base equals two. Also
10076 verify ARG is a constant and that ARG_SG is an int pointer. */
10077 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10078 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10079 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10080 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10082 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10084 /* In addition to NaN and Inf, the argument cannot be zero or a
10085 negative integer. */
10086 if (real_isfinite (ra)
10087 && ra->cl != rvc_zero
10088 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10090 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10091 const int prec = fmt->p;
10092 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10093 int inexact, sg;
10094 mpfr_t m;
10095 tree result_lg;
10097 mpfr_init2 (m, prec);
10098 mpfr_from_real (m, ra, GMP_RNDN);
10099 mpfr_clear_flags ();
10100 inexact = mpfr_lgamma (m, &sg, m, rnd);
10101 result_lg = do_mpfr_ckconv (m, type, inexact);
10102 mpfr_clear (m);
10103 if (result_lg)
10105 tree result_sg;
10107 /* Dereference the arg_sg pointer argument. */
10108 arg_sg = build_fold_indirect_ref (arg_sg);
10109 /* Assign the signgam value into *arg_sg. */
10110 result_sg = fold_build2 (MODIFY_EXPR,
10111 TREE_TYPE (arg_sg), arg_sg,
10112 build_int_cst (TREE_TYPE (arg_sg), sg));
10113 TREE_SIDE_EFFECTS (result_sg) = 1;
10114 /* Combine the signgam assignment with the lgamma result. */
10115 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10116 result_sg, result_lg));
10121 return result;
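/* As an illustration with a constant argument: lgamma_r (-0.5, &sg) folds
   to a COMPOUND_EXPR that stores -1 in *sg (gamma (-0.5) is negative) and
   yields approximately 1.2655, i.e. log (|gamma (-0.5)|).  */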
10124 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10125 mpc function FUNC on it and return the resulting value as a tree
10126 with type TYPE. The mpfr precision is set to the precision of
10127 TYPE. We assume that function FUNC returns zero if the result
10128 could be calculated exactly within the requested precision. If
10129 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10130 in the arguments and/or results. */
10132 tree
10133 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10134 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10136 tree result = NULL_TREE;
10138 STRIP_NOPS (arg0);
10139 STRIP_NOPS (arg1);
10141 /* To proceed, MPFR must exactly represent the target floating point
10142 format, which only happens when the target base equals two. */
10143 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10144 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10145 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10146 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10147 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10149 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10150 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10151 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10152 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10154 if (do_nonfinite
10155 || (real_isfinite (re0) && real_isfinite (im0)
10156 && real_isfinite (re1) && real_isfinite (im1)))
10158 const struct real_format *const fmt =
10159 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10160 const int prec = fmt->p;
10161 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10162 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10163 int inexact;
10164 mpc_t m0, m1;
10166 mpc_init2 (m0, prec);
10167 mpc_init2 (m1, prec);
10168 mpfr_from_real (mpc_realref (m0), re0, rnd);
10169 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10170 mpfr_from_real (mpc_realref (m1), re1, rnd);
10171 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10172 mpfr_clear_flags ();
10173 inexact = func (m0, m0, m1, crnd);
10174 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10175 mpc_clear (m0);
10176 mpc_clear (m1);
10180 return result;
10183 /* A wrapper function for builtin folding that prevents warnings for
10184 "statement without effect" and the like, caused by removing the
10185 call node earlier than the warning is generated. */
10187 tree
10188 fold_call_stmt (gcall *stmt, bool ignore)
10190 tree ret = NULL_TREE;
10191 tree fndecl = gimple_call_fndecl (stmt);
10192 location_t loc = gimple_location (stmt);
10193 if (fndecl
10194 && TREE_CODE (fndecl) == FUNCTION_DECL
10195 && DECL_BUILT_IN (fndecl)
10196 && !gimple_call_va_arg_pack_p (stmt))
10198 int nargs = gimple_call_num_args (stmt);
10199 tree *args = (nargs > 0
10200 ? gimple_call_arg_ptr (stmt, 0)
10201 : &error_mark_node);
10203 if (avoid_folding_inline_builtin (fndecl))
10204 return NULL_TREE;
10205 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10207 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10209 else
10211 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10212 if (ret)
10214 /* Propagate location information from original call to
10215 expansion of builtin. Otherwise things like
10216 maybe_emit_chk_warning, that operate on the expansion
10217 of a builtin, will use the wrong location information. */
10218 if (gimple_has_location (stmt))
10220 tree realret = ret;
10221 if (TREE_CODE (ret) == NOP_EXPR)
10222 realret = TREE_OPERAND (ret, 0);
10223 if (CAN_HAVE_LOCATION_P (realret)
10224 && !EXPR_HAS_LOCATION (realret))
10225 SET_EXPR_LOCATION (realret, loc);
10226 return realret;
10228 return ret;
10232 return NULL_TREE;
10235 /* Look up the function in builtin_decl that corresponds to DECL
10236 and set ASMSPEC as its user assembler name. DECL must be a
10237 function decl that declares a builtin. */
10239 void
10240 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10242 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10243 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10244 && asmspec != 0);
10246 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10247 set_user_assembler_name (builtin, asmspec);
10249 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10250 && INT_TYPE_SIZE < BITS_PER_WORD)
10252 set_user_assembler_libfunc ("ffs", asmspec);
10253 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10254 "ffs");
10258 /* Return true if DECL is a builtin that expands to a constant or similarly
10259 simple code. */
10260 bool
10261 is_simple_builtin (tree decl)
10263 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10264 switch (DECL_FUNCTION_CODE (decl))
10266 /* Builtins that expand to constants. */
10267 case BUILT_IN_CONSTANT_P:
10268 case BUILT_IN_EXPECT:
10269 case BUILT_IN_OBJECT_SIZE:
10270 case BUILT_IN_UNREACHABLE:
10271 /* Simple register moves or loads from stack. */
10272 case BUILT_IN_ASSUME_ALIGNED:
10273 case BUILT_IN_RETURN_ADDRESS:
10274 case BUILT_IN_EXTRACT_RETURN_ADDR:
10275 case BUILT_IN_FROB_RETURN_ADDR:
10276 case BUILT_IN_RETURN:
10277 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10278 case BUILT_IN_FRAME_ADDRESS:
10279 case BUILT_IN_VA_END:
10280 case BUILT_IN_STACK_SAVE:
10281 case BUILT_IN_STACK_RESTORE:
10282 /* Exception state returns or moves registers around. */
10283 case BUILT_IN_EH_FILTER:
10284 case BUILT_IN_EH_POINTER:
10285 case BUILT_IN_EH_COPY_VALUES:
10286 return true;
10288 default:
10289 return false;
10292 return false;
10295 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10296    most probably expanded inline into reasonably simple code. This is a
10297 superset of is_simple_builtin. */
10298 bool
10299 is_inexpensive_builtin (tree decl)
10301 if (!decl)
10302 return false;
10303 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10304 return true;
10305 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10306 switch (DECL_FUNCTION_CODE (decl))
10308 case BUILT_IN_ABS:
10309 case BUILT_IN_ALLOCA:
10310 case BUILT_IN_ALLOCA_WITH_ALIGN:
10311 case BUILT_IN_BSWAP16:
10312 case BUILT_IN_BSWAP32:
10313 case BUILT_IN_BSWAP64:
10314 case BUILT_IN_CLZ:
10315 case BUILT_IN_CLZIMAX:
10316 case BUILT_IN_CLZL:
10317 case BUILT_IN_CLZLL:
10318 case BUILT_IN_CTZ:
10319 case BUILT_IN_CTZIMAX:
10320 case BUILT_IN_CTZL:
10321 case BUILT_IN_CTZLL:
10322 case BUILT_IN_FFS:
10323 case BUILT_IN_FFSIMAX:
10324 case BUILT_IN_FFSL:
10325 case BUILT_IN_FFSLL:
10326 case BUILT_IN_IMAXABS:
10327 case BUILT_IN_FINITE:
10328 case BUILT_IN_FINITEF:
10329 case BUILT_IN_FINITEL:
10330 case BUILT_IN_FINITED32:
10331 case BUILT_IN_FINITED64:
10332 case BUILT_IN_FINITED128:
10333 case BUILT_IN_FPCLASSIFY:
10334 case BUILT_IN_ISFINITE:
10335 case BUILT_IN_ISINF_SIGN:
10336 case BUILT_IN_ISINF:
10337 case BUILT_IN_ISINFF:
10338 case BUILT_IN_ISINFL:
10339 case BUILT_IN_ISINFD32:
10340 case BUILT_IN_ISINFD64:
10341 case BUILT_IN_ISINFD128:
10342 case BUILT_IN_ISNAN:
10343 case BUILT_IN_ISNANF:
10344 case BUILT_IN_ISNANL:
10345 case BUILT_IN_ISNAND32:
10346 case BUILT_IN_ISNAND64:
10347 case BUILT_IN_ISNAND128:
10348 case BUILT_IN_ISNORMAL:
10349 case BUILT_IN_ISGREATER:
10350 case BUILT_IN_ISGREATEREQUAL:
10351 case BUILT_IN_ISLESS:
10352 case BUILT_IN_ISLESSEQUAL:
10353 case BUILT_IN_ISLESSGREATER:
10354 case BUILT_IN_ISUNORDERED:
10355 case BUILT_IN_VA_ARG_PACK:
10356 case BUILT_IN_VA_ARG_PACK_LEN:
10357 case BUILT_IN_VA_COPY:
10358 case BUILT_IN_TRAP:
10359 case BUILT_IN_SAVEREGS:
10360 case BUILT_IN_POPCOUNTL:
10361 case BUILT_IN_POPCOUNTLL:
10362 case BUILT_IN_POPCOUNTIMAX:
10363 case BUILT_IN_POPCOUNT:
10364 case BUILT_IN_PARITYL:
10365 case BUILT_IN_PARITYLL:
10366 case BUILT_IN_PARITYIMAX:
10367 case BUILT_IN_PARITY:
10368 case BUILT_IN_LABS:
10369 case BUILT_IN_LLABS:
10370 case BUILT_IN_PREFETCH:
10371 case BUILT_IN_ACC_ON_DEVICE:
10372 return true;
10374 default:
10375 return is_simple_builtin (decl);
10378 return false;
10381 /* Return true if T is a constant and the value cast to a target char
10382 can be represented by a host char.
10383    Store the cast char constant in *P if so. */
10385 bool
10386 target_char_cst_p (tree t, char *p)
10388 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10389 return false;
10391 *p = (char)tree_to_uhwi (t);
10392 return true;