PR tree-optimization/71831 - __builtin_object_size poor results with no
[official-gcc.git] / gcc / builtins.c
blob1073e35b17b1bc1f6974c71c940bd9d82bbbfc0f
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-vrp.h"
36 #include "tree-ssanames.h"
37 #include "expmed.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "fold-const-call.h"
45 #include "stor-layout.h"
46 #include "calls.h"
47 #include "varasm.h"
48 #include "tree-object-size.h"
49 #include "realmpfr.h"
50 #include "cfgrtl.h"
51 #include "except.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "libfuncs.h"
57 #include "output.h"
58 #include "typeclass.h"
59 #include "langhooks.h"
60 #include "value-prof.h"
61 #include "builtins.h"
62 #include "asan.h"
63 #include "cilk.h"
64 #include "tree-chkp.h"
65 #include "rtl-chkp.h"
66 #include "internal-fn.h"
67 #include "case-cfn-macros.h"
68 #include "gimple-fold.h"
71 struct target_builtins default_target_builtins;
72 #if SWITCHABLE_TARGET
73 struct target_builtins *this_target_builtins = &default_target_builtins;
74 #endif
76 /* Define the names of the builtin function types and codes. */
77 const char *const built_in_class_names[BUILT_IN_LAST]
78 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
81 const char * built_in_names[(int) END_BUILTINS] =
83 #include "builtins.def"
86 /* Setup an array of builtin_info_type, make sure each element decl is
87 initialized to NULL_TREE. */
88 builtin_info_type builtin_info[(int)END_BUILTINS];
90 /* Non-zero if __builtin_constant_p should be folded right away. */
91 bool force_folding_builtin_constant_p;
93 static rtx c_readstr (const char *, machine_mode);
94 static int target_char_cast (tree, char *);
95 static rtx get_memory_rtx (tree, tree);
96 static int apply_args_size (void);
97 static int apply_result_size (void);
98 static rtx result_vector (int, rtx);
99 static void expand_builtin_prefetch (tree);
100 static rtx expand_builtin_apply_args (void);
101 static rtx expand_builtin_apply_args_1 (void);
102 static rtx expand_builtin_apply (rtx, rtx, rtx);
103 static void expand_builtin_return (rtx);
104 static enum type_class type_to_class (tree);
105 static rtx expand_builtin_classify_type (tree);
106 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
107 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
108 static rtx expand_builtin_interclass_mathfn (tree, rtx);
109 static rtx expand_builtin_sincos (tree);
110 static rtx expand_builtin_cexpi (tree, rtx);
111 static rtx expand_builtin_int_roundingfn (tree, rtx);
112 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
113 static rtx expand_builtin_next_arg (void);
114 static rtx expand_builtin_va_start (tree);
115 static rtx expand_builtin_va_end (tree);
116 static rtx expand_builtin_va_copy (tree);
117 static rtx expand_builtin_strcmp (tree, rtx);
118 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
119 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
120 static rtx expand_builtin_memcpy (tree, rtx);
121 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
122 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
123 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
124 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
126 machine_mode, int, tree);
127 static rtx expand_builtin_strcpy (tree, rtx);
128 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
129 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
130 static rtx expand_builtin_strncpy (tree, rtx);
131 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
132 static rtx expand_builtin_memset (tree, rtx, machine_mode);
133 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
135 static rtx expand_builtin_bzero (tree);
136 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
137 static rtx expand_builtin_alloca (tree, bool);
138 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
139 static rtx expand_builtin_frame_address (tree, tree);
140 static tree stabilize_va_list_loc (location_t, tree, int);
141 static rtx expand_builtin_expect (tree, rtx);
142 static tree fold_builtin_constant_p (tree);
143 static tree fold_builtin_classify_type (tree);
144 static tree fold_builtin_strlen (location_t, tree, tree);
145 static tree fold_builtin_inf (location_t, tree, int);
146 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
147 static bool validate_arg (const_tree, enum tree_code code);
148 static rtx expand_builtin_fabs (tree, rtx, rtx);
149 static rtx expand_builtin_signbit (tree, rtx);
150 static tree fold_builtin_strchr (location_t, tree, tree, tree);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
171 static tree fold_builtin_strspn (location_t, tree, tree);
172 static tree fold_builtin_strcspn (location_t, tree, tree);
174 static rtx expand_builtin_object_size (tree);
175 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
176 enum built_in_function);
177 static void maybe_emit_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
179 static void maybe_emit_free_warning (tree);
180 static tree fold_builtin_object_size (tree, tree);
182 unsigned HOST_WIDE_INT target_newline;
183 unsigned HOST_WIDE_INT target_percent;
184 static unsigned HOST_WIDE_INT target_c;
185 static unsigned HOST_WIDE_INT target_s;
186 char target_percent_c[3];
187 char target_percent_s[3];
188 char target_percent_s_newline[4];
189 static tree do_mpfr_remquo (tree, tree, tree);
190 static tree do_mpfr_lgamma_r (tree, tree, tree);
191 static void expand_builtin_sync_synchronize (void);
193 /* Return true if NAME starts with __builtin_ or __sync_. */
195 static bool
196 is_builtin_name (const char *name)
198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
204 if (flag_cilkplus
205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
208 return false;
212 /* Return true if DECL is a function symbol representing a built-in. */
214 bool
215 is_builtin_fn (tree decl)
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
220 /* Return true if NODE should be considered for inline expansion regardless
221 of the optimization level. This means whenever a function is invoked with
222 its "internal" name, which normally contains the prefix "__builtin". */
224 bool
225 called_as_built_in (tree node)
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
234 /* Compute values M and N such that M divides (address of EXP - N) and such
235 that N < M. If these numbers can be determined, store M in alignp and N in
236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
237 *alignp and any bit-offset to *bitposp.
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
244 whereas foo() itself starts on an even address.
246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
249 static bool
250 get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
255 machine_mode mode;
256 int unsignedp, reversep, volatilep;
257 unsigned int align = BITS_PER_UNIT;
258 bool known_alignment = false;
260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
263 &unsignedp, &reversep, &volatilep);
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
267 if (TREE_CODE (exp) == FUNCTION_DECL)
269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
276 else if (TREE_CODE (exp) == LABEL_DECL)
278 else if (TREE_CODE (exp) == CONST_DECL)
280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
282 align = TYPE_ALIGN (TREE_TYPE (exp));
283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
286 known_alignment = true;
288 else if (DECL_P (exp))
290 align = DECL_ALIGN (exp);
291 known_alignment = true;
293 else if (TREE_CODE (exp) == INDIRECT_REF
294 || TREE_CODE (exp) == MEM_REF
295 || TREE_CODE (exp) == TARGET_MEM_REF)
297 tree addr = TREE_OPERAND (exp, 0);
298 unsigned ptr_align;
299 unsigned HOST_WIDE_INT ptr_bitpos;
300 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
302 /* If the address is explicitely aligned, handle that. */
303 if (TREE_CODE (addr) == BIT_AND_EXPR
304 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
306 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
307 ptr_bitmask *= BITS_PER_UNIT;
308 align = ptr_bitmask & -ptr_bitmask;
309 addr = TREE_OPERAND (addr, 0);
312 known_alignment
313 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
314 align = MAX (ptr_align, align);
316 /* Re-apply explicit alignment to the bitpos. */
317 ptr_bitpos &= ptr_bitmask;
319 /* The alignment of the pointer operand in a TARGET_MEM_REF
320 has to take the variable offset parts into account. */
321 if (TREE_CODE (exp) == TARGET_MEM_REF)
323 if (TMR_INDEX (exp))
325 unsigned HOST_WIDE_INT step = 1;
326 if (TMR_STEP (exp))
327 step = TREE_INT_CST_LOW (TMR_STEP (exp));
328 align = MIN (align, (step & -step) * BITS_PER_UNIT);
330 if (TMR_INDEX2 (exp))
331 align = BITS_PER_UNIT;
332 known_alignment = false;
335 /* When EXP is an actual memory reference then we can use
336 TYPE_ALIGN of a pointer indirection to derive alignment.
337 Do so only if get_pointer_alignment_1 did not reveal absolute
338 alignment knowledge and if using that alignment would
339 improve the situation. */
340 if (!addr_p && !known_alignment
341 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
342 align = TYPE_ALIGN (TREE_TYPE (exp));
343 else
345 /* Else adjust bitpos accordingly. */
346 bitpos += ptr_bitpos;
347 if (TREE_CODE (exp) == MEM_REF
348 || TREE_CODE (exp) == TARGET_MEM_REF)
349 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
352 else if (TREE_CODE (exp) == STRING_CST)
354 /* STRING_CST are the only constant objects we allow to be not
355 wrapped inside a CONST_DECL. */
356 align = TYPE_ALIGN (TREE_TYPE (exp));
357 if (CONSTANT_CLASS_P (exp))
358 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
360 known_alignment = true;
363 /* If there is a non-constant offset part extract the maximum
364 alignment that can prevail. */
365 if (offset)
367 unsigned int trailing_zeros = tree_ctz (offset);
368 if (trailing_zeros < HOST_BITS_PER_INT)
370 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
371 if (inner)
372 align = MIN (align, inner);
376 *alignp = align;
377 *bitposp = bitpos & (*alignp - 1);
378 return known_alignment;
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in alignp and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
393 /* Return the alignment in bits of EXP, an object. */
395 unsigned int
396 get_object_alignment (tree exp)
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
401 get_object_alignment_1 (exp, &align, &bitpos);
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
406 if (bitpos != 0)
407 align = (bitpos & -bitpos);
408 return align;
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in alignp and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
416 If EXP is not a pointer, false is returned too. */
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
422 STRIP_NOPS (exp);
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
466 else
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
473 else if (TREE_CODE (exp) == INTEGER_CST)
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
493 unsigned int
494 get_pointer_alignment (tree exp)
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
499 get_pointer_alignment_1 (exp, &align, &bitpos);
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
504 if (bitpos != 0)
505 align = (bitpos & -bitpos);
507 return align;
510 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
511 way, because it could contain a zero byte in the middle.
512 TREE_STRING_LENGTH is the size of the character array, not the string.
514 ONLY_VALUE should be nonzero if the result is not going to be emitted
515 into the instruction stream and zero if it is going to be expanded.
516 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
517 is returned, otherwise NULL, since
518 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
519 evaluate the side-effects.
521 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
522 accesses. Note that this implies the result is not going to be emitted
523 into the instruction stream.
525 The value returned is of type `ssizetype'.
527 Unfortunately, string_constant can't access the values of const char
528 arrays with initializers, so neither can we do so here. */
530 tree
531 c_strlen (tree src, int only_value)
533 tree offset_node;
534 HOST_WIDE_INT offset;
535 int max;
536 const char *ptr;
537 location_t loc;
539 STRIP_NOPS (src);
540 if (TREE_CODE (src) == COND_EXPR
541 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
543 tree len1, len2;
545 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
546 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
547 if (tree_int_cst_equal (len1, len2))
548 return len1;
551 if (TREE_CODE (src) == COMPOUND_EXPR
552 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
553 return c_strlen (TREE_OPERAND (src, 1), only_value);
555 loc = EXPR_LOC_OR_LOC (src, input_location);
557 src = string_constant (src, &offset_node);
558 if (src == 0)
559 return NULL_TREE;
561 max = TREE_STRING_LENGTH (src) - 1;
562 ptr = TREE_STRING_POINTER (src);
564 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
566 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
567 compute the offset to the following null if we don't know where to
568 start searching for it. */
569 int i;
571 for (i = 0; i < max; i++)
572 if (ptr[i] == 0)
573 return NULL_TREE;
575 /* We don't know the starting offset, but we do know that the string
576 has no internal zero bytes. We can assume that the offset falls
577 within the bounds of the string; otherwise, the programmer deserves
578 what he gets. Subtract the offset from the length of the string,
579 and return that. This would perhaps not be valid if we were dealing
580 with named arrays in addition to literal string constants. */
582 return size_diffop_loc (loc, size_int (max), offset_node);
585 /* We have a known offset into the string. Start searching there for
586 a null character if we can represent it as a single HOST_WIDE_INT. */
587 if (offset_node == 0)
588 offset = 0;
589 else if (! tree_fits_shwi_p (offset_node))
590 offset = -1;
591 else
592 offset = tree_to_shwi (offset_node);
594 /* If the offset is known to be out of bounds, warn, and call strlen at
595 runtime. */
596 if (offset < 0 || offset > max)
598 /* Suppress multiple warnings for propagated constant strings. */
599 if (only_value != 2
600 && !TREE_NO_WARNING (src))
602 warning_at (loc, 0, "offset outside bounds of constant string");
603 TREE_NO_WARNING (src) = 1;
605 return NULL_TREE;
608 /* Use strlen to search for the first zero byte. Since any strings
609 constructed with build_string will have nulls appended, we win even
610 if we get handed something like (char[4])"abcd".
612 Since OFFSET is our starting index into the string, no further
613 calculation is needed. */
614 return ssize_int (strlen (ptr + offset));
617 /* Return a constant integer corresponding to target reading
618 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
620 static rtx
621 c_readstr (const char *str, machine_mode mode)
623 HOST_WIDE_INT ch;
624 unsigned int i, j;
625 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
627 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
628 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
629 / HOST_BITS_PER_WIDE_INT;
631 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
632 for (i = 0; i < len; i++)
633 tmp[i] = 0;
635 ch = 1;
636 for (i = 0; i < GET_MODE_SIZE (mode); i++)
638 j = i;
639 if (WORDS_BIG_ENDIAN)
640 j = GET_MODE_SIZE (mode) - i - 1;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
643 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
644 j *= BITS_PER_UNIT;
646 if (ch)
647 ch = (unsigned char) str[i];
648 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
651 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
652 return immed_wide_int_const (c, mode);
655 /* Cast a target constant CST to target CHAR and if that value fits into
656 host char type, return zero and put that value into variable pointed to by
657 P. */
659 static int
660 target_char_cast (tree cst, char *p)
662 unsigned HOST_WIDE_INT val, hostval;
664 if (TREE_CODE (cst) != INTEGER_CST
665 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
666 return 1;
668 /* Do not care if it fits or not right here. */
669 val = TREE_INT_CST_LOW (cst);
671 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
672 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
674 hostval = val;
675 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
676 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
678 if (val != hostval)
679 return 1;
681 *p = hostval;
682 return 0;
685 /* Similar to save_expr, but assumes that arbitrary code is not executed
686 in between the multiple evaluations. In particular, we assume that a
687 non-addressable local variable will not be modified. */
689 static tree
690 builtin_save_expr (tree exp)
692 if (TREE_CODE (exp) == SSA_NAME
693 || (TREE_ADDRESSABLE (exp) == 0
694 && (TREE_CODE (exp) == PARM_DECL
695 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
696 return exp;
698 return save_expr (exp);
701 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
702 times to get the address of either a higher stack frame, or a return
703 address located within it (depending on FNDECL_CODE). */
705 static rtx
706 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
708 int i;
709 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
710 if (tem == NULL_RTX)
712 /* For a zero count with __builtin_return_address, we don't care what
713 frame address we return, because target-specific definitions will
714 override us. Therefore frame pointer elimination is OK, and using
715 the soft frame pointer is OK.
717 For a nonzero count, or a zero count with __builtin_frame_address,
718 we require a stable offset from the current frame pointer to the
719 previous one, so we must use the hard frame pointer, and
720 we must disable frame pointer elimination. */
721 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
722 tem = frame_pointer_rtx;
723 else
725 tem = hard_frame_pointer_rtx;
727 /* Tell reload not to eliminate the frame pointer. */
728 crtl->accesses_prior_frames = 1;
732 if (count > 0)
733 SETUP_FRAME_ADDRESSES ();
735 /* On the SPARC, the return address is not in the frame, it is in a
736 register. There is no way to access it off of the current frame
737 pointer, but it can be accessed off the previous frame pointer by
738 reading the value from the register window save area. */
739 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
740 count--;
742 /* Scan back COUNT frames to the specified frame. */
743 for (i = 0; i < count; i++)
745 /* Assume the dynamic chain pointer is in the word that the
746 frame address points to, unless otherwise specified. */
747 tem = DYNAMIC_CHAIN_ADDRESS (tem);
748 tem = memory_address (Pmode, tem);
749 tem = gen_frame_mem (Pmode, tem);
750 tem = copy_to_reg (tem);
753 /* For __builtin_frame_address, return what we've got. But, on
754 the SPARC for example, we may have to add a bias. */
755 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
756 return FRAME_ADDR_RTX (tem);
758 /* For __builtin_return_address, get the return address from that frame. */
759 #ifdef RETURN_ADDR_RTX
760 tem = RETURN_ADDR_RTX (count, tem);
761 #else
762 tem = memory_address (Pmode,
763 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
764 tem = gen_frame_mem (Pmode, tem);
765 #endif
766 return tem;
769 /* Alias set used for setjmp buffer. */
770 static alias_set_type setjmp_alias_set = -1;
772 /* Construct the leading half of a __builtin_setjmp call. Control will
773 return to RECEIVER_LABEL. This is also called directly by the SJLJ
774 exception handling code. */
776 void
777 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
779 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
780 rtx stack_save;
781 rtx mem;
783 if (setjmp_alias_set == -1)
784 setjmp_alias_set = new_alias_set ();
786 buf_addr = convert_memory_address (Pmode, buf_addr);
788 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
790 /* We store the frame pointer and the address of receiver_label in
791 the buffer and use the rest of it for the stack save area, which
792 is machine-dependent. */
794 mem = gen_rtx_MEM (Pmode, buf_addr);
795 set_mem_alias_set (mem, setjmp_alias_set);
796 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
798 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
799 GET_MODE_SIZE (Pmode))),
800 set_mem_alias_set (mem, setjmp_alias_set);
802 emit_move_insn (validize_mem (mem),
803 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
805 stack_save = gen_rtx_MEM (sa_mode,
806 plus_constant (Pmode, buf_addr,
807 2 * GET_MODE_SIZE (Pmode)));
808 set_mem_alias_set (stack_save, setjmp_alias_set);
809 emit_stack_save (SAVE_NONLOCAL, &stack_save);
811 /* If there is further processing to do, do it. */
812 if (targetm.have_builtin_setjmp_setup ())
813 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
815 /* We have a nonlocal label. */
816 cfun->has_nonlocal_label = 1;
819 /* Construct the trailing part of a __builtin_setjmp call. This is
820 also called directly by the SJLJ exception handling code.
821 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
823 void
824 expand_builtin_setjmp_receiver (rtx receiver_label)
826 rtx chain;
828 /* Mark the FP as used when we get here, so we have to make sure it's
829 marked as used by this function. */
830 emit_use (hard_frame_pointer_rtx);
832 /* Mark the static chain as clobbered here so life information
833 doesn't get messed up for it. */
834 chain = targetm.calls.static_chain (current_function_decl, true);
835 if (chain && REG_P (chain))
836 emit_clobber (chain);
838 /* Now put in the code to restore the frame pointer, and argument
839 pointer, if needed. */
840 if (! targetm.have_nonlocal_goto ())
842 /* First adjust our frame pointer to its actual value. It was
843 previously set to the start of the virtual area corresponding to
844 the stacked variables when we branched here and now needs to be
845 adjusted to the actual hardware fp value.
847 Assignments to virtual registers are converted by
848 instantiate_virtual_regs into the corresponding assignment
849 to the underlying register (fp in this case) that makes
850 the original assignment true.
851 So the following insn will actually be decrementing fp by
852 STARTING_FRAME_OFFSET. */
853 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
855 /* Restoring the frame pointer also modifies the hard frame pointer.
856 Mark it used (so that the previous assignment remains live once
857 the frame pointer is eliminated) and clobbered (to represent the
858 implicit update from the assignment). */
859 emit_use (hard_frame_pointer_rtx);
860 emit_clobber (hard_frame_pointer_rtx);
863 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
865 #ifdef ELIMINABLE_REGS
866 /* If the argument pointer can be eliminated in favor of the
867 frame pointer, we don't need to restore it. We assume here
868 that if such an elimination is present, it can always be used.
869 This is the case on all known machines; if we don't make this
870 assumption, we do unnecessary saving on many machines. */
871 size_t i;
872 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
874 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
875 if (elim_regs[i].from == ARG_POINTER_REGNUM
876 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
877 break;
879 if (i == ARRAY_SIZE (elim_regs))
880 #endif
882 /* Now restore our arg pointer from the address at which it
883 was saved in our stack frame. */
884 emit_move_insn (crtl->args.internal_arg_pointer,
885 copy_to_reg (get_arg_pointer_save_area ()));
889 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
890 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
891 else if (targetm.have_nonlocal_goto_receiver ())
892 emit_insn (targetm.gen_nonlocal_goto_receiver ());
893 else
894 { /* Nothing */ }
896 /* We must not allow the code we just generated to be reordered by
897 scheduling. Specifically, the update of the frame pointer must
898 happen immediately, not later. */
899 emit_insn (gen_blockage ());
902 /* __builtin_longjmp is passed a pointer to an array of five words (not
903 all will be used on all machines). It operates similarly to the C
904 library function of the same name, but is more efficient. Much of
905 the code below is copied from the handling of non-local gotos. */
907 static void
908 expand_builtin_longjmp (rtx buf_addr, rtx value)
910 rtx fp, lab, stack;
911 rtx_insn *insn, *last;
912 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
914 /* DRAP is needed for stack realign if longjmp is expanded to current
915 function */
916 if (SUPPORTS_STACK_ALIGNMENT)
917 crtl->need_drap = true;
919 if (setjmp_alias_set == -1)
920 setjmp_alias_set = new_alias_set ();
922 buf_addr = convert_memory_address (Pmode, buf_addr);
924 buf_addr = force_reg (Pmode, buf_addr);
926 /* We require that the user must pass a second argument of 1, because
927 that is what builtin_setjmp will return. */
928 gcc_assert (value == const1_rtx);
930 last = get_last_insn ();
931 if (targetm.have_builtin_longjmp ())
932 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
933 else
935 fp = gen_rtx_MEM (Pmode, buf_addr);
936 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
937 GET_MODE_SIZE (Pmode)));
939 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
940 2 * GET_MODE_SIZE (Pmode)));
941 set_mem_alias_set (fp, setjmp_alias_set);
942 set_mem_alias_set (lab, setjmp_alias_set);
943 set_mem_alias_set (stack, setjmp_alias_set);
945 /* Pick up FP, label, and SP from the block and jump. This code is
946 from expand_goto in stmt.c; see there for detailed comments. */
947 if (targetm.have_nonlocal_goto ())
948 /* We have to pass a value to the nonlocal_goto pattern that will
949 get copied into the static_chain pointer, but it does not matter
950 what that value is, because builtin_setjmp does not use it. */
951 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
952 else
954 lab = copy_to_reg (lab);
956 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
957 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
959 emit_move_insn (hard_frame_pointer_rtx, fp);
960 emit_stack_restore (SAVE_NONLOCAL, stack);
962 emit_use (hard_frame_pointer_rtx);
963 emit_use (stack_pointer_rtx);
964 emit_indirect_jump (lab);
968 /* Search backwards and mark the jump insn as a non-local goto.
969 Note that this precludes the use of __builtin_longjmp to a
970 __builtin_setjmp target in the same function. However, we've
971 already cautioned the user that these functions are for
972 internal exception handling use only. */
973 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
975 gcc_assert (insn != last);
977 if (JUMP_P (insn))
979 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
980 break;
982 else if (CALL_P (insn))
983 break;
987 static inline bool
988 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
990 return (iter->i < iter->n);
993 /* This function validates the types of a function call argument list
994 against a specified list of tree_codes. If the last specifier is a 0,
995 that represents an ellipses, otherwise the last specifier must be a
996 VOID_TYPE. */
998 static bool
999 validate_arglist (const_tree callexpr, ...)
1001 enum tree_code code;
1002 bool res = 0;
1003 va_list ap;
1004 const_call_expr_arg_iterator iter;
1005 const_tree arg;
1007 va_start (ap, callexpr);
1008 init_const_call_expr_arg_iterator (callexpr, &iter);
1012 code = (enum tree_code) va_arg (ap, int);
1013 switch (code)
1015 case 0:
1016 /* This signifies an ellipses, any further arguments are all ok. */
1017 res = true;
1018 goto end;
1019 case VOID_TYPE:
1020 /* This signifies an endlink, if no arguments remain, return
1021 true, otherwise return false. */
1022 res = !more_const_call_expr_args_p (&iter);
1023 goto end;
1024 default:
1025 /* If no parameters remain or the parameter's code does not
1026 match the specified code, return false. Otherwise continue
1027 checking any remaining arguments. */
1028 arg = next_const_call_expr_arg (&iter);
1029 if (!validate_arg (arg, code))
1030 goto end;
1031 break;
1034 while (1);
1036 /* We need gotos here since we can only have one VA_CLOSE in a
1037 function. */
1038 end: ;
1039 va_end (ap);
1041 return res;
1044 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1045 and the address of the save area. */
1047 static rtx
1048 expand_builtin_nonlocal_goto (tree exp)
1050 tree t_label, t_save_area;
1051 rtx r_label, r_save_area, r_fp, r_sp;
1052 rtx_insn *insn;
1054 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1055 return NULL_RTX;
1057 t_label = CALL_EXPR_ARG (exp, 0);
1058 t_save_area = CALL_EXPR_ARG (exp, 1);
1060 r_label = expand_normal (t_label);
1061 r_label = convert_memory_address (Pmode, r_label);
1062 r_save_area = expand_normal (t_save_area);
1063 r_save_area = convert_memory_address (Pmode, r_save_area);
1064 /* Copy the address of the save location to a register just in case it was
1065 based on the frame pointer. */
1066 r_save_area = copy_to_reg (r_save_area);
1067 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1068 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1069 plus_constant (Pmode, r_save_area,
1070 GET_MODE_SIZE (Pmode)));
1072 crtl->has_nonlocal_goto = 1;
1074 /* ??? We no longer need to pass the static chain value, afaik. */
1075 if (targetm.have_nonlocal_goto ())
1076 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1077 else
1079 r_label = copy_to_reg (r_label);
1081 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1082 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1084 /* Restore frame pointer for containing function. */
1085 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1086 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1088 /* USE of hard_frame_pointer_rtx added for consistency;
1089 not clear if really needed. */
1090 emit_use (hard_frame_pointer_rtx);
1091 emit_use (stack_pointer_rtx);
1093 /* If the architecture is using a GP register, we must
1094 conservatively assume that the target function makes use of it.
1095 The prologue of functions with nonlocal gotos must therefore
1096 initialize the GP register to the appropriate value, and we
1097 must then make sure that this value is live at the point
1098 of the jump. (Note that this doesn't necessarily apply
1099 to targets with a nonlocal_goto pattern; they are free
1100 to implement it in their own way. Note also that this is
1101 a no-op if the GP register is a global invariant.) */
1102 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1103 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1104 emit_use (pic_offset_table_rtx);
1106 emit_indirect_jump (r_label);
1109 /* Search backwards to the jump insn and mark it as a
1110 non-local goto. */
1111 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1113 if (JUMP_P (insn))
1115 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1116 break;
1118 else if (CALL_P (insn))
1119 break;
1122 return const0_rtx;
1125 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1126 (not all will be used on all machines) that was passed to __builtin_setjmp.
1127 It updates the stack pointer in that block to the current value. This is
1128 also called directly by the SJLJ exception handling code. */
1130 void
1131 expand_builtin_update_setjmp_buf (rtx buf_addr)
1133 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1134 rtx stack_save
1135 = gen_rtx_MEM (sa_mode,
1136 memory_address
1137 (sa_mode,
1138 plus_constant (Pmode, buf_addr,
1139 2 * GET_MODE_SIZE (Pmode))));
1141 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1144 /* Expand a call to __builtin_prefetch. For a target that does not support
1145 data prefetch, evaluate the memory address argument in case it has side
1146 effects. */
1148 static void
1149 expand_builtin_prefetch (tree exp)
1151 tree arg0, arg1, arg2;
1152 int nargs;
1153 rtx op0, op1, op2;
1155 if (!validate_arglist (exp, POINTER_TYPE, 0))
1156 return;
1158 arg0 = CALL_EXPR_ARG (exp, 0);
1160 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1161 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1162 locality). */
1163 nargs = call_expr_nargs (exp);
1164 if (nargs > 1)
1165 arg1 = CALL_EXPR_ARG (exp, 1);
1166 else
1167 arg1 = integer_zero_node;
1168 if (nargs > 2)
1169 arg2 = CALL_EXPR_ARG (exp, 2);
1170 else
1171 arg2 = integer_three_node;
1173 /* Argument 0 is an address. */
1174 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1176 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1177 if (TREE_CODE (arg1) != INTEGER_CST)
1179 error ("second argument to %<__builtin_prefetch%> must be a constant");
1180 arg1 = integer_zero_node;
1182 op1 = expand_normal (arg1);
1183 /* Argument 1 must be either zero or one. */
1184 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1186 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1187 " using zero");
1188 op1 = const0_rtx;
1191 /* Argument 2 (locality) must be a compile-time constant int. */
1192 if (TREE_CODE (arg2) != INTEGER_CST)
1194 error ("third argument to %<__builtin_prefetch%> must be a constant");
1195 arg2 = integer_zero_node;
1197 op2 = expand_normal (arg2);
1198 /* Argument 2 must be 0, 1, 2, or 3. */
1199 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1201 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1202 op2 = const0_rtx;
1205 if (targetm.have_prefetch ())
1207 struct expand_operand ops[3];
1209 create_address_operand (&ops[0], op0);
1210 create_integer_operand (&ops[1], INTVAL (op1));
1211 create_integer_operand (&ops[2], INTVAL (op2));
1212 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1213 return;
1216 /* Don't do anything with direct references to volatile memory, but
1217 generate code to handle other side effects. */
1218 if (!MEM_P (op0) && side_effects_p (op0))
1219 emit_insn (op0);
1222 /* Get a MEM rtx for expression EXP which is the address of an operand
1223 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1224 the maximum length of the block of memory that might be accessed or
1225 NULL if unknown. */
1227 static rtx
1228 get_memory_rtx (tree exp, tree len)
1230 tree orig_exp = exp;
1231 rtx addr, mem;
1233 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1234 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1235 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1236 exp = TREE_OPERAND (exp, 0);
1238 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1239 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1241 /* Get an expression we can use to find the attributes to assign to MEM.
1242 First remove any nops. */
1243 while (CONVERT_EXPR_P (exp)
1244 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1245 exp = TREE_OPERAND (exp, 0);
1247 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1248 (as builtin stringops may alias with anything). */
1249 exp = fold_build2 (MEM_REF,
1250 build_array_type (char_type_node,
1251 build_range_type (sizetype,
1252 size_one_node, len)),
1253 exp, build_int_cst (ptr_type_node, 0));
1255 /* If the MEM_REF has no acceptable address, try to get the base object
1256 from the original address we got, and build an all-aliasing
1257 unknown-sized access to that one. */
1258 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1259 set_mem_attributes (mem, exp, 0);
1260 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1261 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1262 0))))
1264 exp = build_fold_addr_expr (exp);
1265 exp = fold_build2 (MEM_REF,
1266 build_array_type (char_type_node,
1267 build_range_type (sizetype,
1268 size_zero_node,
1269 NULL)),
1270 exp, build_int_cst (ptr_type_node, 0));
1271 set_mem_attributes (mem, exp, 0);
1273 set_mem_alias_set (mem, 0);
1274 return mem;
1277 /* Built-in functions to perform an untyped call and return. */
1279 #define apply_args_mode \
1280 (this_target_builtins->x_apply_args_mode)
1281 #define apply_result_mode \
1282 (this_target_builtins->x_apply_result_mode)
1284 /* Return the size required for the block returned by __builtin_apply_args,
1285 and initialize apply_args_mode. */
1287 static int
1288 apply_args_size (void)
1290 static int size = -1;
1291 int align;
1292 unsigned int regno;
1293 machine_mode mode;
1295 /* The values computed by this function never change. */
1296 if (size < 0)
1298 /* The first value is the incoming arg-pointer. */
1299 size = GET_MODE_SIZE (Pmode);
1301 /* The second value is the structure value address unless this is
1302 passed as an "invisible" first argument. */
1303 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1304 size += GET_MODE_SIZE (Pmode);
1306 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1307 if (FUNCTION_ARG_REGNO_P (regno))
1309 mode = targetm.calls.get_raw_arg_mode (regno);
1311 gcc_assert (mode != VOIDmode);
1313 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1314 if (size % align != 0)
1315 size = CEIL (size, align) * align;
1316 size += GET_MODE_SIZE (mode);
1317 apply_args_mode[regno] = mode;
1319 else
1321 apply_args_mode[regno] = VOIDmode;
1324 return size;
1327 /* Return the size required for the block returned by __builtin_apply,
1328 and initialize apply_result_mode. */
1330 static int
1331 apply_result_size (void)
1333 static int size = -1;
1334 int align, regno;
1335 machine_mode mode;
1337 /* The values computed by this function never change. */
1338 if (size < 0)
1340 size = 0;
1342 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1343 if (targetm.calls.function_value_regno_p (regno))
1345 mode = targetm.calls.get_raw_result_mode (regno);
1347 gcc_assert (mode != VOIDmode);
1349 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1350 if (size % align != 0)
1351 size = CEIL (size, align) * align;
1352 size += GET_MODE_SIZE (mode);
1353 apply_result_mode[regno] = mode;
1355 else
1356 apply_result_mode[regno] = VOIDmode;
1358 /* Allow targets that use untyped_call and untyped_return to override
1359 the size so that machine-specific information can be stored here. */
1360 #ifdef APPLY_RESULT_SIZE
1361 size = APPLY_RESULT_SIZE;
1362 #endif
1364 return size;
1367 /* Create a vector describing the result block RESULT. If SAVEP is true,
1368 the result block is used to save the values; otherwise it is used to
1369 restore the values. */
1371 static rtx
1372 result_vector (int savep, rtx result)
1374 int regno, size, align, nelts;
1375 machine_mode mode;
1376 rtx reg, mem;
1377 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1379 size = nelts = 0;
1380 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1381 if ((mode = apply_result_mode[regno]) != VOIDmode)
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
1386 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1387 mem = adjust_address (result, mode, size);
1388 savevec[nelts++] = (savep
1389 ? gen_rtx_SET (mem, reg)
1390 : gen_rtx_SET (reg, mem));
1391 size += GET_MODE_SIZE (mode);
1393 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1396 /* Save the state required to perform an untyped call with the same
1397 arguments as were passed to the current function. */
1399 static rtx
1400 expand_builtin_apply_args_1 (void)
1402 rtx registers, tem;
1403 int size, align, regno;
1404 machine_mode mode;
1405 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1407 /* Create a block where the arg-pointer, structure value address,
1408 and argument registers can be saved. */
1409 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1411 /* Walk past the arg-pointer and structure value address. */
1412 size = GET_MODE_SIZE (Pmode);
1413 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1414 size += GET_MODE_SIZE (Pmode);
1416 /* Save each register used in calling a function to the block. */
1417 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1418 if ((mode = apply_args_mode[regno]) != VOIDmode)
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1424 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1426 emit_move_insn (adjust_address (registers, mode, size), tem);
1427 size += GET_MODE_SIZE (mode);
1430 /* Save the arg pointer to the block. */
1431 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1432 /* We need the pointer as the caller actually passed them to us, not
1433 as we might have pretended they were passed. Make sure it's a valid
1434 operand, as emit_move_insn isn't expected to handle a PLUS. */
1435 if (STACK_GROWS_DOWNWARD)
1437 = force_operand (plus_constant (Pmode, tem,
1438 crtl->args.pretend_args_size),
1439 NULL_RTX);
1440 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1442 size = GET_MODE_SIZE (Pmode);
1444 /* Save the structure value address unless this is passed as an
1445 "invisible" first argument. */
1446 if (struct_incoming_value)
1448 emit_move_insn (adjust_address (registers, Pmode, size),
1449 copy_to_reg (struct_incoming_value));
1450 size += GET_MODE_SIZE (Pmode);
1453 /* Return the address of the block. */
1454 return copy_addr_to_reg (XEXP (registers, 0));
1457 /* __builtin_apply_args returns block of memory allocated on
1458 the stack into which is stored the arg pointer, structure
1459 value address, static chain, and all the registers that might
1460 possibly be used in performing a function call. The code is
1461 moved to the start of the function so the incoming values are
1462 saved. */
1464 static rtx
1465 expand_builtin_apply_args (void)
1467 /* Don't do __builtin_apply_args more than once in a function.
1468 Save the result of the first call and reuse it. */
1469 if (apply_args_value != 0)
1470 return apply_args_value;
1472 /* When this function is called, it means that registers must be
1473 saved on entry to this function. So we migrate the
1474 call to the first insn of this function. */
1475 rtx temp;
1477 start_sequence ();
1478 temp = expand_builtin_apply_args_1 ();
1479 rtx_insn *seq = get_insns ();
1480 end_sequence ();
1482 apply_args_value = temp;
1484 /* Put the insns after the NOTE that starts the function.
1485 If this is inside a start_sequence, make the outer-level insn
1486 chain current, so the code is placed at the start of the
1487 function. If internal_arg_pointer is a non-virtual pseudo,
1488 it needs to be placed after the function that initializes
1489 that pseudo. */
1490 push_topmost_sequence ();
1491 if (REG_P (crtl->args.internal_arg_pointer)
1492 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1493 emit_insn_before (seq, parm_birth_insn);
1494 else
1495 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1496 pop_topmost_sequence ();
1497 return temp;
1501 /* Perform an untyped call and save the state required to perform an
1502 untyped return of whatever value was returned by the given function. */
1504 static rtx
1505 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1507 int size, align, regno;
1508 machine_mode mode;
1509 rtx incoming_args, result, reg, dest, src;
1510 rtx_call_insn *call_insn;
1511 rtx old_stack_level = 0;
1512 rtx call_fusage = 0;
1513 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1515 arguments = convert_memory_address (Pmode, arguments);
1517 /* Create a block where the return registers can be saved. */
1518 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1520 /* Fetch the arg pointer from the ARGUMENTS block. */
1521 incoming_args = gen_reg_rtx (Pmode);
1522 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1523 if (!STACK_GROWS_DOWNWARD)
1524 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1525 incoming_args, 0, OPTAB_LIB_WIDEN);
1527 /* Push a new argument block and copy the arguments. Do not allow
1528 the (potential) memcpy call below to interfere with our stack
1529 manipulations. */
1530 do_pending_stack_adjust ();
1531 NO_DEFER_POP;
1533 /* Save the stack with nonlocal if available. */
1534 if (targetm.have_save_stack_nonlocal ())
1535 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1536 else
1537 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1539 /* Allocate a block of memory onto the stack and copy the memory
1540 arguments to the outgoing arguments address. We can pass TRUE
1541 as the 4th argument because we just saved the stack pointer
1542 and will restore it right after the call. */
1543 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1545 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1546 may have already set current_function_calls_alloca to true.
1547 current_function_calls_alloca won't be set if argsize is zero,
1548 so we have to guarantee need_drap is true here. */
1549 if (SUPPORTS_STACK_ALIGNMENT)
1550 crtl->need_drap = true;
1552 dest = virtual_outgoing_args_rtx;
1553 if (!STACK_GROWS_DOWNWARD)
1555 if (CONST_INT_P (argsize))
1556 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1557 else
1558 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1560 dest = gen_rtx_MEM (BLKmode, dest);
1561 set_mem_align (dest, PARM_BOUNDARY);
1562 src = gen_rtx_MEM (BLKmode, incoming_args);
1563 set_mem_align (src, PARM_BOUNDARY);
1564 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1566 /* Refer to the argument block. */
1567 apply_args_size ();
1568 arguments = gen_rtx_MEM (BLKmode, arguments);
1569 set_mem_align (arguments, PARM_BOUNDARY);
1571 /* Walk past the arg-pointer and structure value address. */
1572 size = GET_MODE_SIZE (Pmode);
1573 if (struct_value)
1574 size += GET_MODE_SIZE (Pmode);
1576 /* Restore each of the registers previously saved. Make USE insns
1577 for each of these registers for use in making the call. */
1578 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1579 if ((mode = apply_args_mode[regno]) != VOIDmode)
1581 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1582 if (size % align != 0)
1583 size = CEIL (size, align) * align;
1584 reg = gen_rtx_REG (mode, regno);
1585 emit_move_insn (reg, adjust_address (arguments, mode, size));
1586 use_reg (&call_fusage, reg);
1587 size += GET_MODE_SIZE (mode);
1590 /* Restore the structure value address unless this is passed as an
1591 "invisible" first argument. */
1592 size = GET_MODE_SIZE (Pmode);
1593 if (struct_value)
1595 rtx value = gen_reg_rtx (Pmode);
1596 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1597 emit_move_insn (struct_value, value);
1598 if (REG_P (struct_value))
1599 use_reg (&call_fusage, struct_value);
1600 size += GET_MODE_SIZE (Pmode);
1603 /* All arguments and registers used for the call are set up by now! */
1604 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1606 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1607 and we don't want to load it into a register as an optimization,
1608 because prepare_call_address already did it if it should be done. */
1609 if (GET_CODE (function) != SYMBOL_REF)
1610 function = memory_address (FUNCTION_MODE, function);
1612 /* Generate the actual call instruction and save the return value. */
1613 if (targetm.have_untyped_call ())
1615 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1616 emit_call_insn (targetm.gen_untyped_call (mem, result,
1617 result_vector (1, result)));
1619 else if (targetm.have_call_value ())
1621 rtx valreg = 0;
1623 /* Locate the unique return register. It is not possible to
1624 express a call that sets more than one return register using
1625 call_value; use untyped_call for that. In fact, untyped_call
1626 only needs to save the return registers in the given block. */
1627 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1628 if ((mode = apply_result_mode[regno]) != VOIDmode)
1630 gcc_assert (!valreg); /* have_untyped_call required. */
1632 valreg = gen_rtx_REG (mode, regno);
1635 emit_insn (targetm.gen_call_value (valreg,
1636 gen_rtx_MEM (FUNCTION_MODE, function),
1637 const0_rtx, NULL_RTX, const0_rtx));
1639 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1641 else
1642 gcc_unreachable ();
1644 /* Find the CALL insn we just emitted, and attach the register usage
1645 information. */
1646 call_insn = last_call_insn ();
1647 add_function_usage_to (call_insn, call_fusage);
1649 /* Restore the stack. */
1650 if (targetm.have_save_stack_nonlocal ())
1651 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1652 else
1653 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1654 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1656 OK_DEFER_POP;
1658 /* Return the address of the result block. */
1659 result = copy_addr_to_reg (XEXP (result, 0));
1660 return convert_memory_address (ptr_mode, result);
1663 /* Perform an untyped return. */
1665 static void
1666 expand_builtin_return (rtx result)
1668 int size, align, regno;
1669 machine_mode mode;
1670 rtx reg;
1671 rtx_insn *call_fusage = 0;
1673 result = convert_memory_address (Pmode, result);
1675 apply_result_size ();
1676 result = gen_rtx_MEM (BLKmode, result);
1678 if (targetm.have_untyped_return ())
1680 rtx vector = result_vector (0, result);
1681 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1682 emit_barrier ();
1683 return;
1686 /* Restore the return value and note that each value is used. */
1687 size = 0;
1688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1689 if ((mode = apply_result_mode[regno]) != VOIDmode)
1691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1692 if (size % align != 0)
1693 size = CEIL (size, align) * align;
1694 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1695 emit_move_insn (reg, adjust_address (result, mode, size));
1697 push_to_sequence (call_fusage);
1698 emit_use (reg);
1699 call_fusage = get_insns ();
1700 end_sequence ();
1701 size += GET_MODE_SIZE (mode);
1704 /* Put the USE insns before the return. */
1705 emit_insn (call_fusage);
1707 /* Return whatever values was restored by jumping directly to the end
1708 of the function. */
1709 expand_naked_return ();
1712 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1714 static enum type_class
1715 type_to_class (tree type)
1717 switch (TREE_CODE (type))
1719 case VOID_TYPE: return void_type_class;
1720 case INTEGER_TYPE: return integer_type_class;
1721 case ENUMERAL_TYPE: return enumeral_type_class;
1722 case BOOLEAN_TYPE: return boolean_type_class;
1723 case POINTER_TYPE: return pointer_type_class;
1724 case REFERENCE_TYPE: return reference_type_class;
1725 case OFFSET_TYPE: return offset_type_class;
1726 case REAL_TYPE: return real_type_class;
1727 case COMPLEX_TYPE: return complex_type_class;
1728 case FUNCTION_TYPE: return function_type_class;
1729 case METHOD_TYPE: return method_type_class;
1730 case RECORD_TYPE: return record_type_class;
1731 case UNION_TYPE:
1732 case QUAL_UNION_TYPE: return union_type_class;
1733 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1734 ? string_type_class : array_type_class);
1735 case LANG_TYPE: return lang_type_class;
1736 default: return no_type_class;
1740 /* Expand a call EXP to __builtin_classify_type. */
1742 static rtx
1743 expand_builtin_classify_type (tree exp)
1745 if (call_expr_nargs (exp))
1746 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1747 return GEN_INT (no_type_class);
1750 /* This helper macro, meant to be used in mathfn_built_in below,
1751 determines which among a set of three builtin math functions is
1752 appropriate for a given type mode. The `F' and `L' cases are
1753 automatically generated from the `double' case. */
1754 #define CASE_MATHFN(MATHFN) \
1755 CASE_CFN_##MATHFN: \
1756 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1757 fcodel = BUILT_IN_##MATHFN##L ; break;
1758 /* Similar to above, but appends _R after any F/L suffix. */
1759 #define CASE_MATHFN_REENT(MATHFN) \
1760 case CFN_BUILT_IN_##MATHFN##_R: \
1761 case CFN_BUILT_IN_##MATHFN##F_R: \
1762 case CFN_BUILT_IN_##MATHFN##L_R: \
1763 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1764 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1766 /* Return a function equivalent to FN but operating on floating-point
1767 values of type TYPE, or END_BUILTINS if no such function exists.
1768 This is purely an operation on function codes; it does not guarantee
1769 that the target actually has an implementation of the function. */
1771 static built_in_function
1772 mathfn_built_in_2 (tree type, combined_fn fn)
1774 built_in_function fcode, fcodef, fcodel;
1776 switch (fn)
1778 CASE_MATHFN (ACOS)
1779 CASE_MATHFN (ACOSH)
1780 CASE_MATHFN (ASIN)
1781 CASE_MATHFN (ASINH)
1782 CASE_MATHFN (ATAN)
1783 CASE_MATHFN (ATAN2)
1784 CASE_MATHFN (ATANH)
1785 CASE_MATHFN (CBRT)
1786 CASE_MATHFN (CEIL)
1787 CASE_MATHFN (CEXPI)
1788 CASE_MATHFN (COPYSIGN)
1789 CASE_MATHFN (COS)
1790 CASE_MATHFN (COSH)
1791 CASE_MATHFN (DREM)
1792 CASE_MATHFN (ERF)
1793 CASE_MATHFN (ERFC)
1794 CASE_MATHFN (EXP)
1795 CASE_MATHFN (EXP10)
1796 CASE_MATHFN (EXP2)
1797 CASE_MATHFN (EXPM1)
1798 CASE_MATHFN (FABS)
1799 CASE_MATHFN (FDIM)
1800 CASE_MATHFN (FLOOR)
1801 CASE_MATHFN (FMA)
1802 CASE_MATHFN (FMAX)
1803 CASE_MATHFN (FMIN)
1804 CASE_MATHFN (FMOD)
1805 CASE_MATHFN (FREXP)
1806 CASE_MATHFN (GAMMA)
1807 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1808 CASE_MATHFN (HUGE_VAL)
1809 CASE_MATHFN (HYPOT)
1810 CASE_MATHFN (ILOGB)
1811 CASE_MATHFN (ICEIL)
1812 CASE_MATHFN (IFLOOR)
1813 CASE_MATHFN (INF)
1814 CASE_MATHFN (IRINT)
1815 CASE_MATHFN (IROUND)
1816 CASE_MATHFN (ISINF)
1817 CASE_MATHFN (J0)
1818 CASE_MATHFN (J1)
1819 CASE_MATHFN (JN)
1820 CASE_MATHFN (LCEIL)
1821 CASE_MATHFN (LDEXP)
1822 CASE_MATHFN (LFLOOR)
1823 CASE_MATHFN (LGAMMA)
1824 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1825 CASE_MATHFN (LLCEIL)
1826 CASE_MATHFN (LLFLOOR)
1827 CASE_MATHFN (LLRINT)
1828 CASE_MATHFN (LLROUND)
1829 CASE_MATHFN (LOG)
1830 CASE_MATHFN (LOG10)
1831 CASE_MATHFN (LOG1P)
1832 CASE_MATHFN (LOG2)
1833 CASE_MATHFN (LOGB)
1834 CASE_MATHFN (LRINT)
1835 CASE_MATHFN (LROUND)
1836 CASE_MATHFN (MODF)
1837 CASE_MATHFN (NAN)
1838 CASE_MATHFN (NANS)
1839 CASE_MATHFN (NEARBYINT)
1840 CASE_MATHFN (NEXTAFTER)
1841 CASE_MATHFN (NEXTTOWARD)
1842 CASE_MATHFN (POW)
1843 CASE_MATHFN (POWI)
1844 CASE_MATHFN (POW10)
1845 CASE_MATHFN (REMAINDER)
1846 CASE_MATHFN (REMQUO)
1847 CASE_MATHFN (RINT)
1848 CASE_MATHFN (ROUND)
1849 CASE_MATHFN (SCALB)
1850 CASE_MATHFN (SCALBLN)
1851 CASE_MATHFN (SCALBN)
1852 CASE_MATHFN (SIGNBIT)
1853 CASE_MATHFN (SIGNIFICAND)
1854 CASE_MATHFN (SIN)
1855 CASE_MATHFN (SINCOS)
1856 CASE_MATHFN (SINH)
1857 CASE_MATHFN (SQRT)
1858 CASE_MATHFN (TAN)
1859 CASE_MATHFN (TANH)
1860 CASE_MATHFN (TGAMMA)
1861 CASE_MATHFN (TRUNC)
1862 CASE_MATHFN (Y0)
1863 CASE_MATHFN (Y1)
1864 CASE_MATHFN (YN)
1866 default:
1867 return END_BUILTINS;
1870 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1871 return fcode;
1872 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1873 return fcodef;
1874 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1875 return fcodel;
1876 else
1877 return END_BUILTINS;
1880 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1881 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1882 otherwise use the explicit declaration. If we can't do the conversion,
1883 return null. */
1885 static tree
1886 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1888 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1889 if (fcode2 == END_BUILTINS)
1890 return NULL_TREE;
1892 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1893 return NULL_TREE;
1895 return builtin_decl_explicit (fcode2);
1898 /* Like mathfn_built_in_1, but always use the implicit array. */
1900 tree
1901 mathfn_built_in (tree type, combined_fn fn)
1903 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1906 /* Like mathfn_built_in_1, but take a built_in_function and
1907 always use the implicit array. */
1909 tree
1910 mathfn_built_in (tree type, enum built_in_function fn)
1912 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1915 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1916 return its code, otherwise return IFN_LAST. Note that this function
1917 only tests whether the function is defined in internals.def, not whether
1918 it is actually available on the target. */
1920 internal_fn
1921 associated_internal_fn (tree fndecl)
1923 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1924 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1925 switch (DECL_FUNCTION_CODE (fndecl))
1927 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1928 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1929 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1930 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1931 #include "internal-fn.def"
1933 CASE_FLT_FN (BUILT_IN_POW10):
1934 return IFN_EXP10;
1936 CASE_FLT_FN (BUILT_IN_DREM):
1937 return IFN_REMAINDER;
1939 CASE_FLT_FN (BUILT_IN_SCALBN):
1940 CASE_FLT_FN (BUILT_IN_SCALBLN):
1941 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1942 return IFN_LDEXP;
1943 return IFN_LAST;
1945 default:
1946 return IFN_LAST;
1950 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1951 on the current target by a call to an internal function, return the
1952 code of that internal function, otherwise return IFN_LAST. The caller
1953 is responsible for ensuring that any side-effects of the built-in
1954 call are dealt with correctly. E.g. if CALL sets errno, the caller
1955 must decide that the errno result isn't needed or make it available
1956 in some other way. */
1958 internal_fn
1959 replacement_internal_fn (gcall *call)
1961 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1963 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1964 if (ifn != IFN_LAST)
1966 tree_pair types = direct_internal_fn_types (ifn, call);
1967 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1968 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1969 return ifn;
1972 return IFN_LAST;
1975 /* Expand a call to the builtin trinary math functions (fma).
1976 Return NULL_RTX if a normal call should be emitted rather than expanding the
1977 function in-line. EXP is the expression that is a call to the builtin
1978 function; if convenient, the result should be placed in TARGET.
1979 SUBTARGET may be used as the target for computing one of EXP's
1980 operands. */
1982 static rtx
1983 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1985 optab builtin_optab;
1986 rtx op0, op1, op2, result;
1987 rtx_insn *insns;
1988 tree fndecl = get_callee_fndecl (exp);
1989 tree arg0, arg1, arg2;
1990 machine_mode mode;
1992 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1993 return NULL_RTX;
1995 arg0 = CALL_EXPR_ARG (exp, 0);
1996 arg1 = CALL_EXPR_ARG (exp, 1);
1997 arg2 = CALL_EXPR_ARG (exp, 2);
1999 switch (DECL_FUNCTION_CODE (fndecl))
2001 CASE_FLT_FN (BUILT_IN_FMA):
2002 builtin_optab = fma_optab; break;
2003 default:
2004 gcc_unreachable ();
2007 /* Make a suitable register to place result in. */
2008 mode = TYPE_MODE (TREE_TYPE (exp));
2010 /* Before working hard, check whether the instruction is available. */
2011 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2012 return NULL_RTX;
2014 result = gen_reg_rtx (mode);
2016 /* Always stabilize the argument list. */
2017 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2018 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2019 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2021 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2022 op1 = expand_normal (arg1);
2023 op2 = expand_normal (arg2);
2025 start_sequence ();
2027 /* Compute into RESULT.
2028 Set RESULT to wherever the result comes back. */
2029 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2030 result, 0);
2032 /* If we were unable to expand via the builtin, stop the sequence
2033 (without outputting the insns) and call to the library function
2034 with the stabilized argument list. */
2035 if (result == 0)
2037 end_sequence ();
2038 return expand_call (exp, target, target == const0_rtx);
2041 /* Output the entire sequence. */
2042 insns = get_insns ();
2043 end_sequence ();
2044 emit_insn (insns);
2046 return result;
2049 /* Expand a call to the builtin sin and cos math functions.
2050 Return NULL_RTX if a normal call should be emitted rather than expanding the
2051 function in-line. EXP is the expression that is a call to the builtin
2052 function; if convenient, the result should be placed in TARGET.
2053 SUBTARGET may be used as the target for computing one of EXP's
2054 operands. */
2056 static rtx
2057 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2059 optab builtin_optab;
2060 rtx op0;
2061 rtx_insn *insns;
2062 tree fndecl = get_callee_fndecl (exp);
2063 machine_mode mode;
2064 tree arg;
2066 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2067 return NULL_RTX;
2069 arg = CALL_EXPR_ARG (exp, 0);
2071 switch (DECL_FUNCTION_CODE (fndecl))
2073 CASE_FLT_FN (BUILT_IN_SIN):
2074 CASE_FLT_FN (BUILT_IN_COS):
2075 builtin_optab = sincos_optab; break;
2076 default:
2077 gcc_unreachable ();
2080 /* Make a suitable register to place result in. */
2081 mode = TYPE_MODE (TREE_TYPE (exp));
2083 /* Check if sincos insn is available, otherwise fallback
2084 to sin or cos insn. */
2085 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2086 switch (DECL_FUNCTION_CODE (fndecl))
2088 CASE_FLT_FN (BUILT_IN_SIN):
2089 builtin_optab = sin_optab; break;
2090 CASE_FLT_FN (BUILT_IN_COS):
2091 builtin_optab = cos_optab; break;
2092 default:
2093 gcc_unreachable ();
2096 /* Before working hard, check whether the instruction is available. */
2097 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2099 rtx result = gen_reg_rtx (mode);
2101 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2102 need to expand the argument again. This way, we will not perform
2103 side-effects more the once. */
2104 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2106 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2108 start_sequence ();
2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 if (builtin_optab == sincos_optab)
2114 int ok;
2116 switch (DECL_FUNCTION_CODE (fndecl))
2118 CASE_FLT_FN (BUILT_IN_SIN):
2119 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2120 break;
2121 CASE_FLT_FN (BUILT_IN_COS):
2122 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2123 break;
2124 default:
2125 gcc_unreachable ();
2127 gcc_assert (ok);
2129 else
2130 result = expand_unop (mode, builtin_optab, op0, result, 0);
2132 if (result != 0)
2134 /* Output the entire sequence. */
2135 insns = get_insns ();
2136 end_sequence ();
2137 emit_insn (insns);
2138 return result;
2141 /* If we were unable to expand via the builtin, stop the sequence
2142 (without outputting the insns) and call to the library function
2143 with the stabilized argument list. */
2144 end_sequence ();
2147 return expand_call (exp, target, target == const0_rtx);
2150 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2151 return an RTL instruction code that implements the functionality.
2152 If that isn't possible or available return CODE_FOR_nothing. */
2154 static enum insn_code
2155 interclass_mathfn_icode (tree arg, tree fndecl)
2157 bool errno_set = false;
2158 optab builtin_optab = unknown_optab;
2159 machine_mode mode;
2161 switch (DECL_FUNCTION_CODE (fndecl))
2163 CASE_FLT_FN (BUILT_IN_ILOGB):
2164 errno_set = true; builtin_optab = ilogb_optab; break;
2165 CASE_FLT_FN (BUILT_IN_ISINF):
2166 builtin_optab = isinf_optab; break;
2167 case BUILT_IN_ISNORMAL:
2168 case BUILT_IN_ISFINITE:
2169 CASE_FLT_FN (BUILT_IN_FINITE):
2170 case BUILT_IN_FINITED32:
2171 case BUILT_IN_FINITED64:
2172 case BUILT_IN_FINITED128:
2173 case BUILT_IN_ISINFD32:
2174 case BUILT_IN_ISINFD64:
2175 case BUILT_IN_ISINFD128:
2176 /* These builtins have no optabs (yet). */
2177 break;
2178 default:
2179 gcc_unreachable ();
2182 /* There's no easy way to detect the case we need to set EDOM. */
2183 if (flag_errno_math && errno_set)
2184 return CODE_FOR_nothing;
2186 /* Optab mode depends on the mode of the input argument. */
2187 mode = TYPE_MODE (TREE_TYPE (arg));
2189 if (builtin_optab)
2190 return optab_handler (builtin_optab, mode);
2191 return CODE_FOR_nothing;
2194 /* Expand a call to one of the builtin math functions that operate on
2195 floating point argument and output an integer result (ilogb, isinf,
2196 isnan, etc).
2197 Return 0 if a normal call should be emitted rather than expanding the
2198 function in-line. EXP is the expression that is a call to the builtin
2199 function; if convenient, the result should be placed in TARGET. */
2201 static rtx
2202 expand_builtin_interclass_mathfn (tree exp, rtx target)
2204 enum insn_code icode = CODE_FOR_nothing;
2205 rtx op0;
2206 tree fndecl = get_callee_fndecl (exp);
2207 machine_mode mode;
2208 tree arg;
2210 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2211 return NULL_RTX;
2213 arg = CALL_EXPR_ARG (exp, 0);
2214 icode = interclass_mathfn_icode (arg, fndecl);
2215 mode = TYPE_MODE (TREE_TYPE (arg));
2217 if (icode != CODE_FOR_nothing)
2219 struct expand_operand ops[1];
2220 rtx_insn *last = get_last_insn ();
2221 tree orig_arg = arg;
2223 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2224 need to expand the argument again. This way, we will not perform
2225 side-effects more the once. */
2226 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2228 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2230 if (mode != GET_MODE (op0))
2231 op0 = convert_to_mode (mode, op0, 0);
2233 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2234 if (maybe_legitimize_operands (icode, 0, 1, ops)
2235 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2236 return ops[0].value;
2238 delete_insns_since (last);
2239 CALL_EXPR_ARG (exp, 0) = orig_arg;
2242 return NULL_RTX;
2245 /* Expand a call to the builtin sincos math function.
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function. */
2250 static rtx
2251 expand_builtin_sincos (tree exp)
2253 rtx op0, op1, op2, target1, target2;
2254 machine_mode mode;
2255 tree arg, sinp, cosp;
2256 int result;
2257 location_t loc = EXPR_LOCATION (exp);
2258 tree alias_type, alias_off;
2260 if (!validate_arglist (exp, REAL_TYPE,
2261 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2264 arg = CALL_EXPR_ARG (exp, 0);
2265 sinp = CALL_EXPR_ARG (exp, 1);
2266 cosp = CALL_EXPR_ARG (exp, 2);
2268 /* Make a suitable register to place result in. */
2269 mode = TYPE_MODE (TREE_TYPE (arg));
2271 /* Check if sincos insn is available, otherwise emit the call. */
2272 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2273 return NULL_RTX;
2275 target1 = gen_reg_rtx (mode);
2276 target2 = gen_reg_rtx (mode);
2278 op0 = expand_normal (arg);
2279 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2280 alias_off = build_int_cst (alias_type, 0);
2281 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2282 sinp, alias_off));
2283 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2284 cosp, alias_off));
2286 /* Compute into target1 and target2.
2287 Set TARGET to wherever the result comes back. */
2288 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2289 gcc_assert (result);
2291 /* Move target1 and target2 to the memory locations indicated
2292 by op1 and op2. */
2293 emit_move_insn (op1, target1);
2294 emit_move_insn (op2, target2);
2296 return const0_rtx;
2299 /* Expand a call to the internal cexpi builtin to the sincos math function.
2300 EXP is the expression that is a call to the builtin function; if convenient,
2301 the result should be placed in TARGET. */
2303 static rtx
2304 expand_builtin_cexpi (tree exp, rtx target)
2306 tree fndecl = get_callee_fndecl (exp);
2307 tree arg, type;
2308 machine_mode mode;
2309 rtx op0, op1, op2;
2310 location_t loc = EXPR_LOCATION (exp);
2312 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2313 return NULL_RTX;
2315 arg = CALL_EXPR_ARG (exp, 0);
2316 type = TREE_TYPE (arg);
2317 mode = TYPE_MODE (TREE_TYPE (arg));
2319 /* Try expanding via a sincos optab, fall back to emitting a libcall
2320 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2321 is only generated from sincos, cexp or if we have either of them. */
2322 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2324 op1 = gen_reg_rtx (mode);
2325 op2 = gen_reg_rtx (mode);
2327 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2329 /* Compute into op1 and op2. */
2330 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2332 else if (targetm.libc_has_function (function_sincos))
2334 tree call, fn = NULL_TREE;
2335 tree top1, top2;
2336 rtx op1a, op2a;
2338 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2339 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2340 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2341 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2342 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2343 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2344 else
2345 gcc_unreachable ();
2347 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2348 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2349 op1a = copy_addr_to_reg (XEXP (op1, 0));
2350 op2a = copy_addr_to_reg (XEXP (op2, 0));
2351 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2352 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2354 /* Make sure not to fold the sincos call again. */
2355 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2356 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2357 call, 3, arg, top1, top2));
2359 else
2361 tree call, fn = NULL_TREE, narg;
2362 tree ctype = build_complex_type (type);
2364 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2365 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2366 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2367 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2368 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2369 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2370 else
2371 gcc_unreachable ();
2373 /* If we don't have a decl for cexp create one. This is the
2374 friendliest fallback if the user calls __builtin_cexpi
2375 without full target C99 function support. */
2376 if (fn == NULL_TREE)
2378 tree fntype;
2379 const char *name = NULL;
2381 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2382 name = "cexpf";
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2384 name = "cexp";
2385 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2386 name = "cexpl";
2388 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2389 fn = build_fn_decl (name, fntype);
2392 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2393 build_real (type, dconst0), arg);
2395 /* Make sure not to fold the cexp call again. */
2396 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2397 return expand_expr (build_call_nary (ctype, call, 1, narg),
2398 target, VOIDmode, EXPAND_NORMAL);
2401 /* Now build the proper return type. */
2402 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2403 make_tree (TREE_TYPE (arg), op2),
2404 make_tree (TREE_TYPE (arg), op1)),
2405 target, VOIDmode, EXPAND_NORMAL);
2408 /* Conveniently construct a function call expression. FNDECL names the
2409 function to be called, N is the number of arguments, and the "..."
2410 parameters are the argument expressions. Unlike build_call_exr
2411 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2413 static tree
2414 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2416 va_list ap;
2417 tree fntype = TREE_TYPE (fndecl);
2418 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2420 va_start (ap, n);
2421 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2422 va_end (ap);
2423 SET_EXPR_LOCATION (fn, loc);
2424 return fn;
2427 /* Expand a call to one of the builtin rounding functions gcc defines
2428 as an extension (lfloor and lceil). As these are gcc extensions we
2429 do not need to worry about setting errno to EDOM.
2430 If expanding via optab fails, lower expression to (int)(floor(x)).
2431 EXP is the expression that is a call to the builtin function;
2432 if convenient, the result should be placed in TARGET. */
2434 static rtx
2435 expand_builtin_int_roundingfn (tree exp, rtx target)
2437 convert_optab builtin_optab;
2438 rtx op0, tmp;
2439 rtx_insn *insns;
2440 tree fndecl = get_callee_fndecl (exp);
2441 enum built_in_function fallback_fn;
2442 tree fallback_fndecl;
2443 machine_mode mode;
2444 tree arg;
2446 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2447 gcc_unreachable ();
2449 arg = CALL_EXPR_ARG (exp, 0);
2451 switch (DECL_FUNCTION_CODE (fndecl))
2453 CASE_FLT_FN (BUILT_IN_ICEIL):
2454 CASE_FLT_FN (BUILT_IN_LCEIL):
2455 CASE_FLT_FN (BUILT_IN_LLCEIL):
2456 builtin_optab = lceil_optab;
2457 fallback_fn = BUILT_IN_CEIL;
2458 break;
2460 CASE_FLT_FN (BUILT_IN_IFLOOR):
2461 CASE_FLT_FN (BUILT_IN_LFLOOR):
2462 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2463 builtin_optab = lfloor_optab;
2464 fallback_fn = BUILT_IN_FLOOR;
2465 break;
2467 default:
2468 gcc_unreachable ();
2471 /* Make a suitable register to place result in. */
2472 mode = TYPE_MODE (TREE_TYPE (exp));
2474 target = gen_reg_rtx (mode);
2476 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2477 need to expand the argument again. This way, we will not perform
2478 side-effects more the once. */
2479 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2481 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2483 start_sequence ();
2485 /* Compute into TARGET. */
2486 if (expand_sfix_optab (target, op0, builtin_optab))
2488 /* Output the entire sequence. */
2489 insns = get_insns ();
2490 end_sequence ();
2491 emit_insn (insns);
2492 return target;
2495 /* If we were unable to expand via the builtin, stop the sequence
2496 (without outputting the insns). */
2497 end_sequence ();
2499 /* Fall back to floating point rounding optab. */
2500 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2502 /* For non-C99 targets we may end up without a fallback fndecl here
2503 if the user called __builtin_lfloor directly. In this case emit
2504 a call to the floor/ceil variants nevertheless. This should result
2505 in the best user experience for not full C99 targets. */
2506 if (fallback_fndecl == NULL_TREE)
2508 tree fntype;
2509 const char *name = NULL;
2511 switch (DECL_FUNCTION_CODE (fndecl))
2513 case BUILT_IN_ICEIL:
2514 case BUILT_IN_LCEIL:
2515 case BUILT_IN_LLCEIL:
2516 name = "ceil";
2517 break;
2518 case BUILT_IN_ICEILF:
2519 case BUILT_IN_LCEILF:
2520 case BUILT_IN_LLCEILF:
2521 name = "ceilf";
2522 break;
2523 case BUILT_IN_ICEILL:
2524 case BUILT_IN_LCEILL:
2525 case BUILT_IN_LLCEILL:
2526 name = "ceill";
2527 break;
2528 case BUILT_IN_IFLOOR:
2529 case BUILT_IN_LFLOOR:
2530 case BUILT_IN_LLFLOOR:
2531 name = "floor";
2532 break;
2533 case BUILT_IN_IFLOORF:
2534 case BUILT_IN_LFLOORF:
2535 case BUILT_IN_LLFLOORF:
2536 name = "floorf";
2537 break;
2538 case BUILT_IN_IFLOORL:
2539 case BUILT_IN_LFLOORL:
2540 case BUILT_IN_LLFLOORL:
2541 name = "floorl";
2542 break;
2543 default:
2544 gcc_unreachable ();
2547 fntype = build_function_type_list (TREE_TYPE (arg),
2548 TREE_TYPE (arg), NULL_TREE);
2549 fallback_fndecl = build_fn_decl (name, fntype);
2552 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2554 tmp = expand_normal (exp);
2555 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2557 /* Truncate the result of floating point optab to integer
2558 via expand_fix (). */
2559 target = gen_reg_rtx (mode);
2560 expand_fix (target, tmp, 0);
2562 return target;
2565 /* Expand a call to one of the builtin math functions doing integer
2566 conversion (lrint).
2567 Return 0 if a normal call should be emitted rather than expanding the
2568 function in-line. EXP is the expression that is a call to the builtin
2569 function; if convenient, the result should be placed in TARGET. */
2571 static rtx
2572 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2574 convert_optab builtin_optab;
2575 rtx op0;
2576 rtx_insn *insns;
2577 tree fndecl = get_callee_fndecl (exp);
2578 tree arg;
2579 machine_mode mode;
2580 enum built_in_function fallback_fn = BUILT_IN_NONE;
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 gcc_unreachable ();
2585 arg = CALL_EXPR_ARG (exp, 0);
2587 switch (DECL_FUNCTION_CODE (fndecl))
2589 CASE_FLT_FN (BUILT_IN_IRINT):
2590 fallback_fn = BUILT_IN_LRINT;
2591 /* FALLTHRU */
2592 CASE_FLT_FN (BUILT_IN_LRINT):
2593 CASE_FLT_FN (BUILT_IN_LLRINT):
2594 builtin_optab = lrint_optab;
2595 break;
2597 CASE_FLT_FN (BUILT_IN_IROUND):
2598 fallback_fn = BUILT_IN_LROUND;
2599 /* FALLTHRU */
2600 CASE_FLT_FN (BUILT_IN_LROUND):
2601 CASE_FLT_FN (BUILT_IN_LLROUND):
2602 builtin_optab = lround_optab;
2603 break;
2605 default:
2606 gcc_unreachable ();
2609 /* There's no easy way to detect the case we need to set EDOM. */
2610 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2611 return NULL_RTX;
2613 /* Make a suitable register to place result in. */
2614 mode = TYPE_MODE (TREE_TYPE (exp));
2616 /* There's no easy way to detect the case we need to set EDOM. */
2617 if (!flag_errno_math)
2619 rtx result = gen_reg_rtx (mode);
2621 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2622 need to expand the argument again. This way, we will not perform
2623 side-effects more the once. */
2624 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2626 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2628 start_sequence ();
2630 if (expand_sfix_optab (result, op0, builtin_optab))
2632 /* Output the entire sequence. */
2633 insns = get_insns ();
2634 end_sequence ();
2635 emit_insn (insns);
2636 return result;
2639 /* If we were unable to expand via the builtin, stop the sequence
2640 (without outputting the insns) and call to the library function
2641 with the stabilized argument list. */
2642 end_sequence ();
2645 if (fallback_fn != BUILT_IN_NONE)
2647 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2648 targets, (int) round (x) should never be transformed into
2649 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2650 a call to lround in the hope that the target provides at least some
2651 C99 functions. This should result in the best user experience for
2652 not full C99 targets. */
2653 tree fallback_fndecl = mathfn_built_in_1
2654 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2656 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2657 fallback_fndecl, 1, arg);
2659 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2660 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2661 return convert_to_mode (mode, target, 0);
2664 return expand_call (exp, target, target == const0_rtx);
2667 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2668 a normal call should be emitted rather than expanding the function
2669 in-line. EXP is the expression that is a call to the builtin
2670 function; if convenient, the result should be placed in TARGET. */
2672 static rtx
2673 expand_builtin_powi (tree exp, rtx target)
2675 tree arg0, arg1;
2676 rtx op0, op1;
2677 machine_mode mode;
2678 machine_mode mode2;
2680 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2681 return NULL_RTX;
2683 arg0 = CALL_EXPR_ARG (exp, 0);
2684 arg1 = CALL_EXPR_ARG (exp, 1);
2685 mode = TYPE_MODE (TREE_TYPE (exp));
2687 /* Emit a libcall to libgcc. */
2689 /* Mode of the 2nd argument must match that of an int. */
2690 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2692 if (target == NULL_RTX)
2693 target = gen_reg_rtx (mode);
2695 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2696 if (GET_MODE (op0) != mode)
2697 op0 = convert_to_mode (mode, op0, 0);
2698 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2699 if (GET_MODE (op1) != mode2)
2700 op1 = convert_to_mode (mode2, op1, 0);
2702 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2703 target, LCT_CONST, mode, 2,
2704 op0, mode, op1, mode2);
2706 return target;
2709 /* Expand expression EXP which is a call to the strlen builtin. Return
2710 NULL_RTX if we failed the caller should emit a normal call, otherwise
2711 try to get the result in TARGET, if convenient. */
2713 static rtx
2714 expand_builtin_strlen (tree exp, rtx target,
2715 machine_mode target_mode)
2717 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2718 return NULL_RTX;
2719 else
2721 struct expand_operand ops[4];
2722 rtx pat;
2723 tree len;
2724 tree src = CALL_EXPR_ARG (exp, 0);
2725 rtx src_reg;
2726 rtx_insn *before_strlen;
2727 machine_mode insn_mode = target_mode;
2728 enum insn_code icode = CODE_FOR_nothing;
2729 unsigned int align;
2731 /* If the length can be computed at compile-time, return it. */
2732 len = c_strlen (src, 0);
2733 if (len)
2734 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2736 /* If the length can be computed at compile-time and is constant
2737 integer, but there are side-effects in src, evaluate
2738 src for side-effects, then return len.
2739 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2740 can be optimized into: i++; x = 3; */
2741 len = c_strlen (src, 1);
2742 if (len && TREE_CODE (len) == INTEGER_CST)
2744 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2745 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2748 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2750 /* If SRC is not a pointer type, don't do this operation inline. */
2751 if (align == 0)
2752 return NULL_RTX;
2754 /* Bail out if we can't compute strlen in the right mode. */
2755 while (insn_mode != VOIDmode)
2757 icode = optab_handler (strlen_optab, insn_mode);
2758 if (icode != CODE_FOR_nothing)
2759 break;
2761 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2763 if (insn_mode == VOIDmode)
2764 return NULL_RTX;
2766 /* Make a place to hold the source address. We will not expand
2767 the actual source until we are sure that the expansion will
2768 not fail -- there are trees that cannot be expanded twice. */
2769 src_reg = gen_reg_rtx (Pmode);
2771 /* Mark the beginning of the strlen sequence so we can emit the
2772 source operand later. */
2773 before_strlen = get_last_insn ();
2775 create_output_operand (&ops[0], target, insn_mode);
2776 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2777 create_integer_operand (&ops[2], 0);
2778 create_integer_operand (&ops[3], align);
2779 if (!maybe_expand_insn (icode, 4, ops))
2780 return NULL_RTX;
2782 /* Now that we are assured of success, expand the source. */
2783 start_sequence ();
2784 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2785 if (pat != src_reg)
2787 #ifdef POINTERS_EXTEND_UNSIGNED
2788 if (GET_MODE (pat) != Pmode)
2789 pat = convert_to_mode (Pmode, pat,
2790 POINTERS_EXTEND_UNSIGNED);
2791 #endif
2792 emit_move_insn (src_reg, pat);
2794 pat = get_insns ();
2795 end_sequence ();
2797 if (before_strlen)
2798 emit_insn_after (pat, before_strlen);
2799 else
2800 emit_insn_before (pat, get_insns ());
2802 /* Return the value in the proper mode for this function. */
2803 if (GET_MODE (ops[0].value) == target_mode)
2804 target = ops[0].value;
2805 else if (target != 0)
2806 convert_move (target, ops[0].value, 0);
2807 else
2808 target = convert_to_mode (target_mode, ops[0].value, 0);
2810 return target;
2814 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2815 bytes from constant string DATA + OFFSET and return it as target
2816 constant. */
2818 static rtx
2819 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2820 machine_mode mode)
2822 const char *str = (const char *) data;
2824 gcc_assert (offset >= 0
2825 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2826 <= strlen (str) + 1));
2828 return c_readstr (str + offset, mode);
2831 /* LEN specify length of the block of memcpy/memset operation.
2832 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2833 In some cases we can make very likely guess on max size, then we
2834 set it into PROBABLE_MAX_SIZE. */
2836 static void
2837 determine_block_size (tree len, rtx len_rtx,
2838 unsigned HOST_WIDE_INT *min_size,
2839 unsigned HOST_WIDE_INT *max_size,
2840 unsigned HOST_WIDE_INT *probable_max_size)
2842 if (CONST_INT_P (len_rtx))
2844 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2845 return;
2847 else
2849 wide_int min, max;
2850 enum value_range_type range_type = VR_UNDEFINED;
2852 /* Determine bounds from the type. */
2853 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2854 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2855 else
2856 *min_size = 0;
2857 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2858 *probable_max_size = *max_size
2859 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2860 else
2861 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2863 if (TREE_CODE (len) == SSA_NAME)
2864 range_type = get_range_info (len, &min, &max);
2865 if (range_type == VR_RANGE)
2867 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2868 *min_size = min.to_uhwi ();
2869 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2870 *probable_max_size = *max_size = max.to_uhwi ();
2872 else if (range_type == VR_ANTI_RANGE)
2874 /* Anti range 0...N lets us to determine minimal size to N+1. */
2875 if (min == 0)
2877 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2878 *min_size = max.to_uhwi () + 1;
2880 /* Code like
2882 int n;
2883 if (n < 100)
2884 memcpy (a, b, n)
2886 Produce anti range allowing negative values of N. We still
2887 can use the information and make a guess that N is not negative.
2889 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2890 *probable_max_size = min.to_uhwi () - 1;
2893 gcc_checking_assert (*max_size <=
2894 (unsigned HOST_WIDE_INT)
2895 GET_MODE_MASK (GET_MODE (len_rtx)));
2898 /* Helper function to do the actual work for expand_builtin_memcpy. */
2900 static rtx
2901 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2903 const char *src_str;
2904 unsigned int src_align = get_pointer_alignment (src);
2905 unsigned int dest_align = get_pointer_alignment (dest);
2906 rtx dest_mem, src_mem, dest_addr, len_rtx;
2907 HOST_WIDE_INT expected_size = -1;
2908 unsigned int expected_align = 0;
2909 unsigned HOST_WIDE_INT min_size;
2910 unsigned HOST_WIDE_INT max_size;
2911 unsigned HOST_WIDE_INT probable_max_size;
2913 /* If DEST is not a pointer type, call the normal function. */
2914 if (dest_align == 0)
2915 return NULL_RTX;
2917 /* If either SRC is not a pointer type, don't do this
2918 operation in-line. */
2919 if (src_align == 0)
2920 return NULL_RTX;
2922 if (currently_expanding_gimple_stmt)
2923 stringop_block_profile (currently_expanding_gimple_stmt,
2924 &expected_align, &expected_size);
2926 if (expected_align < dest_align)
2927 expected_align = dest_align;
2928 dest_mem = get_memory_rtx (dest, len);
2929 set_mem_align (dest_mem, dest_align);
2930 len_rtx = expand_normal (len);
2931 determine_block_size (len, len_rtx, &min_size, &max_size,
2932 &probable_max_size);
2933 src_str = c_getstr (src);
2935 /* If SRC is a string constant and block move would be done
2936 by pieces, we can avoid loading the string from memory
2937 and only stored the computed constants. */
2938 if (src_str
2939 && CONST_INT_P (len_rtx)
2940 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2941 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2942 CONST_CAST (char *, src_str),
2943 dest_align, false))
2945 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2946 builtin_memcpy_read_str,
2947 CONST_CAST (char *, src_str),
2948 dest_align, false, 0);
2949 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2950 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2951 return dest_mem;
2954 src_mem = get_memory_rtx (src, len);
2955 set_mem_align (src_mem, src_align);
2957 /* Copy word part most expediently. */
2958 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2959 CALL_EXPR_TAILCALL (exp)
2960 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2961 expected_align, expected_size,
2962 min_size, max_size, probable_max_size);
2964 if (dest_addr == 0)
2966 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2967 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2970 return dest_addr;
2973 /* Expand a call EXP to the memcpy builtin.
2974 Return NULL_RTX if we failed, the caller should emit a normal call,
2975 otherwise try to get the result in TARGET, if convenient (and in
2976 mode MODE if that's convenient). */
2978 static rtx
2979 expand_builtin_memcpy (tree exp, rtx target)
2981 if (!validate_arglist (exp,
2982 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2983 return NULL_RTX;
2984 else
2986 tree dest = CALL_EXPR_ARG (exp, 0);
2987 tree src = CALL_EXPR_ARG (exp, 1);
2988 tree len = CALL_EXPR_ARG (exp, 2);
2989 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2993 /* Expand an instrumented call EXP to the memcpy builtin.
2994 Return NULL_RTX if we failed, the caller should emit a normal call,
2995 otherwise try to get the result in TARGET, if convenient (and in
2996 mode MODE if that's convenient). */
2998 static rtx
2999 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3001 if (!validate_arglist (exp,
3002 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3003 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3004 INTEGER_TYPE, VOID_TYPE))
3005 return NULL_RTX;
3006 else
3008 tree dest = CALL_EXPR_ARG (exp, 0);
3009 tree src = CALL_EXPR_ARG (exp, 2);
3010 tree len = CALL_EXPR_ARG (exp, 4);
3011 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3013 /* Return src bounds with the result. */
3014 if (res)
3016 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3017 expand_normal (CALL_EXPR_ARG (exp, 1)));
3018 res = chkp_join_splitted_slot (res, bnd);
3020 return res;
3024 /* Expand a call EXP to the mempcpy builtin.
3025 Return NULL_RTX if we failed; the caller should emit a normal call,
3026 otherwise try to get the result in TARGET, if convenient (and in
3027 mode MODE if that's convenient). If ENDP is 0 return the
3028 destination pointer, if ENDP is 1 return the end pointer ala
3029 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3030 stpcpy. */
3032 static rtx
3033 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3035 if (!validate_arglist (exp,
3036 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3037 return NULL_RTX;
3038 else
3040 tree dest = CALL_EXPR_ARG (exp, 0);
3041 tree src = CALL_EXPR_ARG (exp, 1);
3042 tree len = CALL_EXPR_ARG (exp, 2);
3043 return expand_builtin_mempcpy_args (dest, src, len,
3044 target, mode, /*endp=*/ 1,
3045 exp);
3049 /* Expand an instrumented call EXP to the mempcpy builtin.
3050 Return NULL_RTX if we failed, the caller should emit a normal call,
3051 otherwise try to get the result in TARGET, if convenient (and in
3052 mode MODE if that's convenient). */
3054 static rtx
3055 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3057 if (!validate_arglist (exp,
3058 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3059 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3060 INTEGER_TYPE, VOID_TYPE))
3061 return NULL_RTX;
3062 else
3064 tree dest = CALL_EXPR_ARG (exp, 0);
3065 tree src = CALL_EXPR_ARG (exp, 2);
3066 tree len = CALL_EXPR_ARG (exp, 4);
3067 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3068 mode, 1, exp);
3070 /* Return src bounds with the result. */
3071 if (res)
3073 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3074 expand_normal (CALL_EXPR_ARG (exp, 1)));
3075 res = chkp_join_splitted_slot (res, bnd);
3077 return res;
3081 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3082 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3083 so that this can also be called without constructing an actual CALL_EXPR.
3084 The other arguments and return value are the same as for
3085 expand_builtin_mempcpy. */
3087 static rtx
3088 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3089 rtx target, machine_mode mode, int endp,
3090 tree orig_exp)
3092 tree fndecl = get_callee_fndecl (orig_exp);
3094 /* If return value is ignored, transform mempcpy into memcpy. */
3095 if (target == const0_rtx
3096 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3097 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3099 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3100 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3101 dest, src, len);
3102 return expand_expr (result, target, mode, EXPAND_NORMAL);
3104 else if (target == const0_rtx
3105 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3107 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3108 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3109 dest, src, len);
3110 return expand_expr (result, target, mode, EXPAND_NORMAL);
3112 else
3114 const char *src_str;
3115 unsigned int src_align = get_pointer_alignment (src);
3116 unsigned int dest_align = get_pointer_alignment (dest);
3117 rtx dest_mem, src_mem, len_rtx;
3119 /* If either SRC or DEST is not a pointer type, don't do this
3120 operation in-line. */
3121 if (dest_align == 0 || src_align == 0)
3122 return NULL_RTX;
3124 /* If LEN is not constant, call the normal function. */
3125 if (! tree_fits_uhwi_p (len))
3126 return NULL_RTX;
3128 len_rtx = expand_normal (len);
3129 src_str = c_getstr (src);
3131 /* If SRC is a string constant and block move would be done
3132 by pieces, we can avoid loading the string from memory
3133 and only stored the computed constants. */
3134 if (src_str
3135 && CONST_INT_P (len_rtx)
3136 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3137 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3138 CONST_CAST (char *, src_str),
3139 dest_align, false))
3141 dest_mem = get_memory_rtx (dest, len);
3142 set_mem_align (dest_mem, dest_align);
3143 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3144 builtin_memcpy_read_str,
3145 CONST_CAST (char *, src_str),
3146 dest_align, false, endp);
3147 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3148 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3149 return dest_mem;
3152 if (CONST_INT_P (len_rtx)
3153 && can_move_by_pieces (INTVAL (len_rtx),
3154 MIN (dest_align, src_align)))
3156 dest_mem = get_memory_rtx (dest, len);
3157 set_mem_align (dest_mem, dest_align);
3158 src_mem = get_memory_rtx (src, len);
3159 set_mem_align (src_mem, src_align);
3160 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3161 MIN (dest_align, src_align), endp);
3162 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3163 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3164 return dest_mem;
3167 return NULL_RTX;
3171 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3172 we failed, the caller should emit a normal call, otherwise try to
3173 get the result in TARGET, if convenient. If ENDP is 0 return the
3174 destination pointer, if ENDP is 1 return the end pointer ala
3175 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3176 stpcpy. */
3178 static rtx
3179 expand_movstr (tree dest, tree src, rtx target, int endp)
3181 struct expand_operand ops[3];
3182 rtx dest_mem;
3183 rtx src_mem;
3185 if (!targetm.have_movstr ())
3186 return NULL_RTX;
3188 dest_mem = get_memory_rtx (dest, NULL);
3189 src_mem = get_memory_rtx (src, NULL);
3190 if (!endp)
3192 target = force_reg (Pmode, XEXP (dest_mem, 0));
3193 dest_mem = replace_equiv_address (dest_mem, target);
3196 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3197 create_fixed_operand (&ops[1], dest_mem);
3198 create_fixed_operand (&ops[2], src_mem);
3199 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3200 return NULL_RTX;
3202 if (endp && target != const0_rtx)
3204 target = ops[0].value;
3205 /* movstr is supposed to set end to the address of the NUL
3206 terminator. If the caller requested a mempcpy-like return value,
3207 adjust it. */
3208 if (endp == 1)
3210 rtx tem = plus_constant (GET_MODE (target),
3211 gen_lowpart (GET_MODE (target), target), 1);
3212 emit_move_insn (target, force_operand (tem, NULL_RTX));
3215 return target;
3218 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3219 NULL_RTX if we failed the caller should emit a normal call, otherwise
3220 try to get the result in TARGET, if convenient (and in mode MODE if that's
3221 convenient). */
3223 static rtx
3224 expand_builtin_strcpy (tree exp, rtx target)
3226 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3228 tree dest = CALL_EXPR_ARG (exp, 0);
3229 tree src = CALL_EXPR_ARG (exp, 1);
3230 return expand_builtin_strcpy_args (dest, src, target);
3232 return NULL_RTX;
3235 /* Helper function to do the actual work for expand_builtin_strcpy. The
3236 arguments to the builtin_strcpy call DEST and SRC are broken out
3237 so that this can also be called without constructing an actual CALL_EXPR.
3238 The other arguments and return value are the same as for
3239 expand_builtin_strcpy. */
3241 static rtx
3242 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3244 return expand_movstr (dest, src, target, /*endp=*/0);
3247 /* Expand a call EXP to the stpcpy builtin.
3248 Return NULL_RTX if we failed the caller should emit a normal call,
3249 otherwise try to get the result in TARGET, if convenient (and in
3250 mode MODE if that's convenient). */
3252 static rtx
3253 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3255 tree dst, src;
3256 location_t loc = EXPR_LOCATION (exp);
3258 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3259 return NULL_RTX;
3261 dst = CALL_EXPR_ARG (exp, 0);
3262 src = CALL_EXPR_ARG (exp, 1);
3264 /* If return value is ignored, transform stpcpy into strcpy. */
3265 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3267 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3268 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3269 return expand_expr (result, target, mode, EXPAND_NORMAL);
3271 else
3273 tree len, lenp1;
3274 rtx ret;
3276 /* Ensure we get an actual string whose length can be evaluated at
3277 compile-time, not an expression containing a string. This is
3278 because the latter will potentially produce pessimized code
3279 when used to produce the return value. */
3280 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3281 return expand_movstr (dst, src, target, /*endp=*/2);
3283 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3284 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3285 target, mode, /*endp=*/2,
3286 exp);
3288 if (ret)
3289 return ret;
3291 if (TREE_CODE (len) == INTEGER_CST)
3293 rtx len_rtx = expand_normal (len);
3295 if (CONST_INT_P (len_rtx))
3297 ret = expand_builtin_strcpy_args (dst, src, target);
3299 if (ret)
3301 if (! target)
3303 if (mode != VOIDmode)
3304 target = gen_reg_rtx (mode);
3305 else
3306 target = gen_reg_rtx (GET_MODE (ret));
3308 if (GET_MODE (target) != GET_MODE (ret))
3309 ret = gen_lowpart (GET_MODE (target), ret);
3311 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3312 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3313 gcc_assert (ret);
3315 return target;
3320 return expand_movstr (dst, src, target, /*endp=*/2);
3324 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3325 bytes from constant string DATA + OFFSET and return it as target
3326 constant. */
3329 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3330 machine_mode mode)
3332 const char *str = (const char *) data;
3334 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3335 return const0_rtx;
3337 return c_readstr (str + offset, mode);
3340 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3341 NULL_RTX if we failed the caller should emit a normal call. */
3343 static rtx
3344 expand_builtin_strncpy (tree exp, rtx target)
3346 location_t loc = EXPR_LOCATION (exp);
3348 if (validate_arglist (exp,
3349 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 tree dest = CALL_EXPR_ARG (exp, 0);
3352 tree src = CALL_EXPR_ARG (exp, 1);
3353 tree len = CALL_EXPR_ARG (exp, 2);
3354 tree slen = c_strlen (src, 1);
3356 /* We must be passed a constant len and src parameter. */
3357 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3358 return NULL_RTX;
3360 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3362 /* We're required to pad with trailing zeros if the requested
3363 len is greater than strlen(s2)+1. In that case try to
3364 use store_by_pieces, if it fails, punt. */
3365 if (tree_int_cst_lt (slen, len))
3367 unsigned int dest_align = get_pointer_alignment (dest);
3368 const char *p = c_getstr (src);
3369 rtx dest_mem;
3371 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3372 || !can_store_by_pieces (tree_to_uhwi (len),
3373 builtin_strncpy_read_str,
3374 CONST_CAST (char *, p),
3375 dest_align, false))
3376 return NULL_RTX;
3378 dest_mem = get_memory_rtx (dest, len);
3379 store_by_pieces (dest_mem, tree_to_uhwi (len),
3380 builtin_strncpy_read_str,
3381 CONST_CAST (char *, p), dest_align, false, 0);
3382 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3383 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3384 return dest_mem;
3387 return NULL_RTX;
3390 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3391 bytes from constant string DATA + OFFSET and return it as target
3392 constant. */
3395 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3396 machine_mode mode)
3398 const char *c = (const char *) data;
3399 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3401 memset (p, *c, GET_MODE_SIZE (mode));
3403 return c_readstr (p, mode);
3406 /* Callback routine for store_by_pieces. Return the RTL of a register
3407 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3408 char value given in the RTL register data. For example, if mode is
3409 4 bytes wide, return the RTL for 0x01010101*data. */
3411 static rtx
3412 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3413 machine_mode mode)
3415 rtx target, coeff;
3416 size_t size;
3417 char *p;
3419 size = GET_MODE_SIZE (mode);
3420 if (size == 1)
3421 return (rtx) data;
3423 p = XALLOCAVEC (char, size);
3424 memset (p, 1, size);
3425 coeff = c_readstr (p, mode);
3427 target = convert_to_mode (mode, (rtx) data, 1);
3428 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3429 return force_reg (mode, target);
3432 /* Expand expression EXP, which is a call to the memset builtin. Return
3433 NULL_RTX if we failed the caller should emit a normal call, otherwise
3434 try to get the result in TARGET, if convenient (and in mode MODE if that's
3435 convenient). */
3437 static rtx
3438 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3440 if (!validate_arglist (exp,
3441 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3442 return NULL_RTX;
3443 else
3445 tree dest = CALL_EXPR_ARG (exp, 0);
3446 tree val = CALL_EXPR_ARG (exp, 1);
3447 tree len = CALL_EXPR_ARG (exp, 2);
3448 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3452 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3453 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3454 try to get the result in TARGET, if convenient (and in mode MODE if that's
3455 convenient). */
3457 static rtx
3458 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3460 if (!validate_arglist (exp,
3461 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3462 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3463 return NULL_RTX;
3464 else
3466 tree dest = CALL_EXPR_ARG (exp, 0);
3467 tree val = CALL_EXPR_ARG (exp, 2);
3468 tree len = CALL_EXPR_ARG (exp, 3);
3469 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3471 /* Return src bounds with the result. */
3472 if (res)
3474 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3475 expand_normal (CALL_EXPR_ARG (exp, 1)));
3476 res = chkp_join_splitted_slot (res, bnd);
3478 return res;
3482 /* Helper function to do the actual work for expand_builtin_memset. The
3483 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3484 so that this can also be called without constructing an actual CALL_EXPR.
3485 The other arguments and return value are the same as for
3486 expand_builtin_memset. */
3488 static rtx
3489 expand_builtin_memset_args (tree dest, tree val, tree len,
3490 rtx target, machine_mode mode, tree orig_exp)
3492 tree fndecl, fn;
3493 enum built_in_function fcode;
3494 machine_mode val_mode;
3495 char c;
3496 unsigned int dest_align;
3497 rtx dest_mem, dest_addr, len_rtx;
3498 HOST_WIDE_INT expected_size = -1;
3499 unsigned int expected_align = 0;
3500 unsigned HOST_WIDE_INT min_size;
3501 unsigned HOST_WIDE_INT max_size;
3502 unsigned HOST_WIDE_INT probable_max_size;
3504 dest_align = get_pointer_alignment (dest);
3506 /* If DEST is not a pointer type, don't do this operation in-line. */
3507 if (dest_align == 0)
3508 return NULL_RTX;
3510 if (currently_expanding_gimple_stmt)
3511 stringop_block_profile (currently_expanding_gimple_stmt,
3512 &expected_align, &expected_size);
3514 if (expected_align < dest_align)
3515 expected_align = dest_align;
3517 /* If the LEN parameter is zero, return DEST. */
3518 if (integer_zerop (len))
3520 /* Evaluate and ignore VAL in case it has side-effects. */
3521 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3522 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3525 /* Stabilize the arguments in case we fail. */
3526 dest = builtin_save_expr (dest);
3527 val = builtin_save_expr (val);
3528 len = builtin_save_expr (len);
3530 len_rtx = expand_normal (len);
3531 determine_block_size (len, len_rtx, &min_size, &max_size,
3532 &probable_max_size);
3533 dest_mem = get_memory_rtx (dest, len);
3534 val_mode = TYPE_MODE (unsigned_char_type_node);
3536 if (TREE_CODE (val) != INTEGER_CST)
3538 rtx val_rtx;
3540 val_rtx = expand_normal (val);
3541 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3543 /* Assume that we can memset by pieces if we can store
3544 * the coefficients by pieces (in the required modes).
3545 * We can't pass builtin_memset_gen_str as that emits RTL. */
3546 c = 1;
3547 if (tree_fits_uhwi_p (len)
3548 && can_store_by_pieces (tree_to_uhwi (len),
3549 builtin_memset_read_str, &c, dest_align,
3550 true))
3552 val_rtx = force_reg (val_mode, val_rtx);
3553 store_by_pieces (dest_mem, tree_to_uhwi (len),
3554 builtin_memset_gen_str, val_rtx, dest_align,
3555 true, 0);
3557 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3558 dest_align, expected_align,
3559 expected_size, min_size, max_size,
3560 probable_max_size))
3561 goto do_libcall;
3563 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3564 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3565 return dest_mem;
3568 if (target_char_cast (val, &c))
3569 goto do_libcall;
3571 if (c)
3573 if (tree_fits_uhwi_p (len)
3574 && can_store_by_pieces (tree_to_uhwi (len),
3575 builtin_memset_read_str, &c, dest_align,
3576 true))
3577 store_by_pieces (dest_mem, tree_to_uhwi (len),
3578 builtin_memset_read_str, &c, dest_align, true, 0);
3579 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3580 gen_int_mode (c, val_mode),
3581 dest_align, expected_align,
3582 expected_size, min_size, max_size,
3583 probable_max_size))
3584 goto do_libcall;
3586 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3587 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3588 return dest_mem;
3591 set_mem_align (dest_mem, dest_align);
3592 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3593 CALL_EXPR_TAILCALL (orig_exp)
3594 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3595 expected_align, expected_size,
3596 min_size, max_size,
3597 probable_max_size);
3599 if (dest_addr == 0)
3601 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3602 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3605 return dest_addr;
3607 do_libcall:
3608 fndecl = get_callee_fndecl (orig_exp);
3609 fcode = DECL_FUNCTION_CODE (fndecl);
3610 if (fcode == BUILT_IN_MEMSET
3611 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3612 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3613 dest, val, len);
3614 else if (fcode == BUILT_IN_BZERO)
3615 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3616 dest, len);
3617 else
3618 gcc_unreachable ();
3619 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3620 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3621 return expand_call (fn, target, target == const0_rtx);
3624 /* Expand expression EXP, which is a call to the bzero builtin. Return
3625 NULL_RTX if we failed the caller should emit a normal call. */
3627 static rtx
3628 expand_builtin_bzero (tree exp)
3630 tree dest, size;
3631 location_t loc = EXPR_LOCATION (exp);
3633 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3634 return NULL_RTX;
3636 dest = CALL_EXPR_ARG (exp, 0);
3637 size = CALL_EXPR_ARG (exp, 1);
3639 /* New argument list transforming bzero(ptr x, int y) to
3640 memset(ptr x, int 0, size_t y). This is done this way
3641 so that if it isn't expanded inline, we fallback to
3642 calling bzero instead of memset. */
3644 return expand_builtin_memset_args (dest, integer_zero_node,
3645 fold_convert_loc (loc,
3646 size_type_node, size),
3647 const0_rtx, VOIDmode, exp);
3650 /* Try to expand cmpstr operation ICODE with the given operands.
3651 Return the result rtx on success, otherwise return null. */
3653 static rtx
3654 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3655 HOST_WIDE_INT align)
3657 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3659 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3660 target = NULL_RTX;
3662 struct expand_operand ops[4];
3663 create_output_operand (&ops[0], target, insn_mode);
3664 create_fixed_operand (&ops[1], arg1_rtx);
3665 create_fixed_operand (&ops[2], arg2_rtx);
3666 create_integer_operand (&ops[3], align);
3667 if (maybe_expand_insn (icode, 4, ops))
3668 return ops[0].value;
3669 return NULL_RTX;
3672 /* Expand expression EXP, which is a call to the memcmp built-in function.
3673 Return NULL_RTX if we failed and the caller should emit a normal call,
3674 otherwise try to get the result in TARGET, if convenient.
3675 RESULT_EQ is true if we can relax the returned value to be either zero
3676 or nonzero, without caring about the sign. */
3678 static rtx
3679 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3681 if (!validate_arglist (exp,
3682 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
3685 tree arg1 = CALL_EXPR_ARG (exp, 0);
3686 tree arg2 = CALL_EXPR_ARG (exp, 1);
3687 tree len = CALL_EXPR_ARG (exp, 2);
3688 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3689 location_t loc = EXPR_LOCATION (exp);
3691 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3692 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3694 /* If we don't have POINTER_TYPE, call the function. */
3695 if (arg1_align == 0 || arg2_align == 0)
3696 return NULL_RTX;
3698 rtx arg1_rtx = get_memory_rtx (arg1, len);
3699 rtx arg2_rtx = get_memory_rtx (arg2, len);
3700 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3702 /* Set MEM_SIZE as appropriate. */
3703 if (CONST_INT_P (len_rtx))
3705 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3706 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3709 by_pieces_constfn constfn = NULL;
3711 const char *src_str = c_getstr (arg1);
3712 if (src_str == NULL)
3713 src_str = c_getstr (arg2);
3714 else
3715 std::swap (arg1_rtx, arg2_rtx);
3717 /* If SRC is a string constant and block move would be done
3718 by pieces, we can avoid loading the string from memory
3719 and only stored the computed constants. */
3720 if (src_str
3721 && CONST_INT_P (len_rtx)
3722 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3723 constfn = builtin_memcpy_read_str;
3725 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3726 TREE_TYPE (len), target,
3727 result_eq, constfn,
3728 CONST_CAST (char *, src_str));
3730 if (result)
3732 /* Return the value in the proper mode for this function. */
3733 if (GET_MODE (result) == mode)
3734 return result;
3736 if (target != 0)
3738 convert_move (target, result, 0);
3739 return target;
3742 return convert_to_mode (mode, result, 0);
3745 return NULL_RTX;
3748 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3749 if we failed the caller should emit a normal call, otherwise try to get
3750 the result in TARGET, if convenient. */
3752 static rtx
3753 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3755 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3756 return NULL_RTX;
3758 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3759 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3760 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3762 rtx arg1_rtx, arg2_rtx;
3763 tree fndecl, fn;
3764 tree arg1 = CALL_EXPR_ARG (exp, 0);
3765 tree arg2 = CALL_EXPR_ARG (exp, 1);
3766 rtx result = NULL_RTX;
3768 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3769 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3771 /* If we don't have POINTER_TYPE, call the function. */
3772 if (arg1_align == 0 || arg2_align == 0)
3773 return NULL_RTX;
3775 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3776 arg1 = builtin_save_expr (arg1);
3777 arg2 = builtin_save_expr (arg2);
3779 arg1_rtx = get_memory_rtx (arg1, NULL);
3780 arg2_rtx = get_memory_rtx (arg2, NULL);
3782 /* Try to call cmpstrsi. */
3783 if (cmpstr_icode != CODE_FOR_nothing)
3784 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3785 MIN (arg1_align, arg2_align));
3787 /* Try to determine at least one length and call cmpstrnsi. */
3788 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3790 tree len;
3791 rtx arg3_rtx;
3793 tree len1 = c_strlen (arg1, 1);
3794 tree len2 = c_strlen (arg2, 1);
3796 if (len1)
3797 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3798 if (len2)
3799 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3801 /* If we don't have a constant length for the first, use the length
3802 of the second, if we know it. We don't require a constant for
3803 this case; some cost analysis could be done if both are available
3804 but neither is constant. For now, assume they're equally cheap,
3805 unless one has side effects. If both strings have constant lengths,
3806 use the smaller. */
3808 if (!len1)
3809 len = len2;
3810 else if (!len2)
3811 len = len1;
3812 else if (TREE_SIDE_EFFECTS (len1))
3813 len = len2;
3814 else if (TREE_SIDE_EFFECTS (len2))
3815 len = len1;
3816 else if (TREE_CODE (len1) != INTEGER_CST)
3817 len = len2;
3818 else if (TREE_CODE (len2) != INTEGER_CST)
3819 len = len1;
3820 else if (tree_int_cst_lt (len1, len2))
3821 len = len1;
3822 else
3823 len = len2;
3825 /* If both arguments have side effects, we cannot optimize. */
3826 if (len && !TREE_SIDE_EFFECTS (len))
3828 arg3_rtx = expand_normal (len);
3829 result = expand_cmpstrn_or_cmpmem
3830 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3831 arg3_rtx, MIN (arg1_align, arg2_align));
3835 if (result)
3837 /* Return the value in the proper mode for this function. */
3838 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3839 if (GET_MODE (result) == mode)
3840 return result;
3841 if (target == 0)
3842 return convert_to_mode (mode, result, 0);
3843 convert_move (target, result, 0);
3844 return target;
3847 /* Expand the library call ourselves using a stabilized argument
3848 list to avoid re-evaluating the function's arguments twice. */
3849 fndecl = get_callee_fndecl (exp);
3850 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3851 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3852 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3853 return expand_call (fn, target, target == const0_rtx);
3855 return NULL_RTX;
3858 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3859 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
3860 the result in TARGET, if convenient. */
3862 static rtx
3863 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3864 ATTRIBUTE_UNUSED machine_mode mode)
3866 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3868 if (!validate_arglist (exp,
3869 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3870 return NULL_RTX;
3872 /* If c_strlen can determine an expression for one of the string
3873 lengths, and it doesn't have side effects, then emit cmpstrnsi
3874 using length MIN(strlen(string)+1, arg3). */
3875 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3876 if (cmpstrn_icode != CODE_FOR_nothing)
3878 tree len, len1, len2;
3879 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3880 rtx result;
3881 tree fndecl, fn;
3882 tree arg1 = CALL_EXPR_ARG (exp, 0);
3883 tree arg2 = CALL_EXPR_ARG (exp, 1);
3884 tree arg3 = CALL_EXPR_ARG (exp, 2);
3886 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3887 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3889 len1 = c_strlen (arg1, 1);
3890 len2 = c_strlen (arg2, 1);
3892 if (len1)
3893 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3894 if (len2)
3895 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3897 /* If we don't have a constant length for the first, use the length
3898 of the second, if we know it. We don't require a constant for
3899 this case; some cost analysis could be done if both are available
3900 but neither is constant. For now, assume they're equally cheap,
3901 unless one has side effects. If both strings have constant lengths,
3902 use the smaller. */
3904 if (!len1)
3905 len = len2;
3906 else if (!len2)
3907 len = len1;
3908 else if (TREE_SIDE_EFFECTS (len1))
3909 len = len2;
3910 else if (TREE_SIDE_EFFECTS (len2))
3911 len = len1;
3912 else if (TREE_CODE (len1) != INTEGER_CST)
3913 len = len2;
3914 else if (TREE_CODE (len2) != INTEGER_CST)
3915 len = len1;
3916 else if (tree_int_cst_lt (len1, len2))
3917 len = len1;
3918 else
3919 len = len2;
3921 /* If both arguments have side effects, we cannot optimize. */
3922 if (!len || TREE_SIDE_EFFECTS (len))
3923 return NULL_RTX;
3925 /* The actual new length parameter is MIN(len,arg3). */
3926 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3927 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3929 /* If we don't have POINTER_TYPE, call the function. */
3930 if (arg1_align == 0 || arg2_align == 0)
3931 return NULL_RTX;
3933 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3934 arg1 = builtin_save_expr (arg1);
3935 arg2 = builtin_save_expr (arg2);
3936 len = builtin_save_expr (len);
3938 arg1_rtx = get_memory_rtx (arg1, len);
3939 arg2_rtx = get_memory_rtx (arg2, len);
3940 arg3_rtx = expand_normal (len);
3941 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3942 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3943 MIN (arg1_align, arg2_align));
3944 if (result)
3946 /* Return the value in the proper mode for this function. */
3947 mode = TYPE_MODE (TREE_TYPE (exp));
3948 if (GET_MODE (result) == mode)
3949 return result;
3950 if (target == 0)
3951 return convert_to_mode (mode, result, 0);
3952 convert_move (target, result, 0);
3953 return target;
3956 /* Expand the library call ourselves using a stabilized argument
3957 list to avoid re-evaluating the function's arguments twice. */
3958 fndecl = get_callee_fndecl (exp);
3959 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3960 arg1, arg2, len);
3961 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3962 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3963 return expand_call (fn, target, target == const0_rtx);
3965 return NULL_RTX;
3968 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3969 if that's convenient. */
3972 expand_builtin_saveregs (void)
3974 rtx val;
3975 rtx_insn *seq;
3977 /* Don't do __builtin_saveregs more than once in a function.
3978 Save the result of the first call and reuse it. */
3979 if (saveregs_value != 0)
3980 return saveregs_value;
3982 /* When this function is called, it means that registers must be
3983 saved on entry to this function. So we migrate the call to the
3984 first insn of this function. */
3986 start_sequence ();
3988 /* Do whatever the machine needs done in this case. */
3989 val = targetm.calls.expand_builtin_saveregs ();
3991 seq = get_insns ();
3992 end_sequence ();
3994 saveregs_value = val;
3996 /* Put the insns after the NOTE that starts the function. If this
3997 is inside a start_sequence, make the outer-level insn chain current, so
3998 the code is placed at the start of the function. */
3999 push_topmost_sequence ();
4000 emit_insn_after (seq, entry_of_function ());
4001 pop_topmost_sequence ();
4003 return val;
4006 /* Expand a call to __builtin_next_arg. */
4008 static rtx
4009 expand_builtin_next_arg (void)
4011 /* Checking arguments is already done in fold_builtin_next_arg
4012 that must be called before this function. */
4013 return expand_binop (ptr_mode, add_optab,
4014 crtl->args.internal_arg_pointer,
4015 crtl->args.arg_offset_rtx,
4016 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4019 /* Make it easier for the backends by protecting the valist argument
4020 from multiple evaluations. */
4022 static tree
4023 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4025 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4027 /* The current way of determining the type of valist is completely
4028 bogus. We should have the information on the va builtin instead. */
4029 if (!vatype)
4030 vatype = targetm.fn_abi_va_list (cfun->decl);
4032 if (TREE_CODE (vatype) == ARRAY_TYPE)
4034 if (TREE_SIDE_EFFECTS (valist))
4035 valist = save_expr (valist);
4037 /* For this case, the backends will be expecting a pointer to
4038 vatype, but it's possible we've actually been given an array
4039 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4040 So fix it. */
4041 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4043 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4044 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4047 else
4049 tree pt = build_pointer_type (vatype);
4051 if (! needs_lvalue)
4053 if (! TREE_SIDE_EFFECTS (valist))
4054 return valist;
4056 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4057 TREE_SIDE_EFFECTS (valist) = 1;
4060 if (TREE_SIDE_EFFECTS (valist))
4061 valist = save_expr (valist);
4062 valist = fold_build2_loc (loc, MEM_REF,
4063 vatype, valist, build_int_cst (pt, 0));
4066 return valist;
4069 /* The "standard" definition of va_list is void*. */
4071 tree
4072 std_build_builtin_va_list (void)
4074 return ptr_type_node;
4077 /* The "standard" abi va_list is va_list_type_node. */
4079 tree
4080 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4082 return va_list_type_node;
4085 /* The "standard" type of va_list is va_list_type_node. */
4087 tree
4088 std_canonical_va_list_type (tree type)
4090 tree wtype, htype;
4092 if (INDIRECT_REF_P (type))
4093 type = TREE_TYPE (type);
4094 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4095 type = TREE_TYPE (type);
4096 wtype = va_list_type_node;
4097 htype = type;
4098 /* Treat structure va_list types. */
4099 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4100 htype = TREE_TYPE (htype);
4101 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4103 /* If va_list is an array type, the argument may have decayed
4104 to a pointer type, e.g. by being passed to another function.
4105 In that case, unwrap both types so that we can compare the
4106 underlying records. */
4107 if (TREE_CODE (htype) == ARRAY_TYPE
4108 || POINTER_TYPE_P (htype))
4110 wtype = TREE_TYPE (wtype);
4111 htype = TREE_TYPE (htype);
4114 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4115 return va_list_type_node;
4117 return NULL_TREE;
4120 /* The "standard" implementation of va_start: just assign `nextarg' to
4121 the variable. */
4123 void
4124 std_expand_builtin_va_start (tree valist, rtx nextarg)
4126 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4127 convert_move (va_r, nextarg, 0);
4129 /* We do not have any valid bounds for the pointer, so
4130 just store zero bounds for it. */
4131 if (chkp_function_instrumented_p (current_function_decl))
4132 chkp_expand_bounds_reset_for_mem (valist,
4133 make_tree (TREE_TYPE (valist),
4134 nextarg));
4137 /* Expand EXP, a call to __builtin_va_start. */
4139 static rtx
4140 expand_builtin_va_start (tree exp)
4142 rtx nextarg;
4143 tree valist;
4144 location_t loc = EXPR_LOCATION (exp);
4146 if (call_expr_nargs (exp) < 2)
4148 error_at (loc, "too few arguments to function %<va_start%>");
4149 return const0_rtx;
4152 if (fold_builtin_next_arg (exp, true))
4153 return const0_rtx;
4155 nextarg = expand_builtin_next_arg ();
4156 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4158 if (targetm.expand_builtin_va_start)
4159 targetm.expand_builtin_va_start (valist, nextarg);
4160 else
4161 std_expand_builtin_va_start (valist, nextarg);
4163 return const0_rtx;
4166 /* Expand EXP, a call to __builtin_va_end. */
4168 static rtx
4169 expand_builtin_va_end (tree exp)
4171 tree valist = CALL_EXPR_ARG (exp, 0);
4173 /* Evaluate for side effects, if needed. I hate macros that don't
4174 do that. */
4175 if (TREE_SIDE_EFFECTS (valist))
4176 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4178 return const0_rtx;
4181 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4182 builtin rather than just as an assignment in stdarg.h because of the
4183 nastiness of array-type va_list types. */
4185 static rtx
4186 expand_builtin_va_copy (tree exp)
4188 tree dst, src, t;
4189 location_t loc = EXPR_LOCATION (exp);
4191 dst = CALL_EXPR_ARG (exp, 0);
4192 src = CALL_EXPR_ARG (exp, 1);
4194 dst = stabilize_va_list_loc (loc, dst, 1);
4195 src = stabilize_va_list_loc (loc, src, 0);
4197 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4199 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4201 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4202 TREE_SIDE_EFFECTS (t) = 1;
4203 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4205 else
4207 rtx dstb, srcb, size;
4209 /* Evaluate to pointers. */
4210 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4211 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4212 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4213 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4215 dstb = convert_memory_address (Pmode, dstb);
4216 srcb = convert_memory_address (Pmode, srcb);
4218 /* "Dereference" to BLKmode memories. */
4219 dstb = gen_rtx_MEM (BLKmode, dstb);
4220 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4221 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4222 srcb = gen_rtx_MEM (BLKmode, srcb);
4223 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4224 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4226 /* Copy. */
4227 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4230 return const0_rtx;
4233 /* Expand a call to one of the builtin functions __builtin_frame_address or
4234 __builtin_return_address. */
4236 static rtx
4237 expand_builtin_frame_address (tree fndecl, tree exp)
4239 /* The argument must be a nonnegative integer constant.
4240 It counts the number of frames to scan up the stack.
4241 The value is either the frame pointer value or the return
4242 address saved in that frame. */
4243 if (call_expr_nargs (exp) == 0)
4244 /* Warning about missing arg was already issued. */
4245 return const0_rtx;
4246 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4248 error ("invalid argument to %qD", fndecl);
4249 return const0_rtx;
4251 else
4253 /* Number of frames to scan up the stack. */
4254 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4256 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4258 /* Some ports cannot access arbitrary stack frames. */
4259 if (tem == NULL)
4261 warning (0, "unsupported argument to %qD", fndecl);
4262 return const0_rtx;
4265 if (count)
4267 /* Warn since no effort is made to ensure that any frame
4268 beyond the current one exists or can be safely reached. */
4269 warning (OPT_Wframe_address, "calling %qD with "
4270 "a nonzero argument is unsafe", fndecl);
4273 /* For __builtin_frame_address, return what we've got. */
4274 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4275 return tem;
4277 if (!REG_P (tem)
4278 && ! CONSTANT_P (tem))
4279 tem = copy_addr_to_reg (tem);
4280 return tem;
4284 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4285 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4286 is the same as for allocate_dynamic_stack_space. */
4288 static rtx
4289 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4291 rtx op0;
4292 rtx result;
4293 bool valid_arglist;
4294 unsigned int align;
4295 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4296 == BUILT_IN_ALLOCA_WITH_ALIGN);
4298 valid_arglist
4299 = (alloca_with_align
4300 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4301 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4303 if (!valid_arglist)
4304 return NULL_RTX;
4306 /* Compute the argument. */
4307 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4309 /* Compute the alignment. */
4310 align = (alloca_with_align
4311 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4312 : BIGGEST_ALIGNMENT);
4314 /* Allocate the desired space. */
4315 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4316 result = convert_memory_address (ptr_mode, result);
4318 return result;
4321 /* Expand a call to bswap builtin in EXP.
4322 Return NULL_RTX if a normal call should be emitted rather than expanding the
4323 function in-line. If convenient, the result should be placed in TARGET.
4324 SUBTARGET may be used as the target for computing one of EXP's operands. */
4326 static rtx
4327 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4328 rtx subtarget)
4330 tree arg;
4331 rtx op0;
4333 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4334 return NULL_RTX;
4336 arg = CALL_EXPR_ARG (exp, 0);
4337 op0 = expand_expr (arg,
4338 subtarget && GET_MODE (subtarget) == target_mode
4339 ? subtarget : NULL_RTX,
4340 target_mode, EXPAND_NORMAL);
4341 if (GET_MODE (op0) != target_mode)
4342 op0 = convert_to_mode (target_mode, op0, 1);
4344 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4346 gcc_assert (target);
4348 return convert_to_mode (target_mode, target, 1);
4351 /* Expand a call to a unary builtin in EXP.
4352 Return NULL_RTX if a normal call should be emitted rather than expanding the
4353 function in-line. If convenient, the result should be placed in TARGET.
4354 SUBTARGET may be used as the target for computing one of EXP's operands. */
4356 static rtx
4357 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4358 rtx subtarget, optab op_optab)
4360 rtx op0;
4362 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4363 return NULL_RTX;
4365 /* Compute the argument. */
4366 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4367 (subtarget
4368 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4369 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4370 VOIDmode, EXPAND_NORMAL);
4371 /* Compute op, into TARGET if possible.
4372 Set TARGET to wherever the result comes back. */
4373 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4374 op_optab, op0, target, op_optab != clrsb_optab);
4375 gcc_assert (target);
4377 return convert_to_mode (target_mode, target, 0);
4380 /* Expand a call to __builtin_expect. We just return our argument
4381 as the builtin_expect semantic should've been already executed by
4382 tree branch prediction pass. */
4384 static rtx
4385 expand_builtin_expect (tree exp, rtx target)
4387 tree arg;
4389 if (call_expr_nargs (exp) < 2)
4390 return const0_rtx;
4391 arg = CALL_EXPR_ARG (exp, 0);
4393 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4394 /* When guessing was done, the hints should be already stripped away. */
4395 gcc_assert (!flag_guess_branch_prob
4396 || optimize == 0 || seen_error ());
4397 return target;
4400 /* Expand a call to __builtin_assume_aligned. We just return our first
4401 argument as the builtin_assume_aligned semantic should've been already
4402 executed by CCP. */
4404 static rtx
4405 expand_builtin_assume_aligned (tree exp, rtx target)
4407 if (call_expr_nargs (exp) < 2)
4408 return const0_rtx;
4409 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4410 EXPAND_NORMAL);
4411 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4412 && (call_expr_nargs (exp) < 3
4413 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4414 return target;
4417 void
4418 expand_builtin_trap (void)
4420 if (targetm.have_trap ())
4422 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4423 /* For trap insns when not accumulating outgoing args force
4424 REG_ARGS_SIZE note to prevent crossjumping of calls with
4425 different args sizes. */
4426 if (!ACCUMULATE_OUTGOING_ARGS)
4427 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4429 else
4431 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4432 tree call_expr = build_call_expr (fn, 0);
4433 expand_call (call_expr, NULL_RTX, false);
4436 emit_barrier ();
4439 /* Expand a call to __builtin_unreachable. We do nothing except emit
4440 a barrier saying that control flow will not pass here.
4442 It is the responsibility of the program being compiled to ensure
4443 that control flow does never reach __builtin_unreachable. */
4444 static void
4445 expand_builtin_unreachable (void)
4447 emit_barrier ();
4450 /* Expand EXP, a call to fabs, fabsf or fabsl.
4451 Return NULL_RTX if a normal call should be emitted rather than expanding
4452 the function inline. If convenient, the result should be placed
4453 in TARGET. SUBTARGET may be used as the target for computing
4454 the operand. */
4456 static rtx
4457 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4459 machine_mode mode;
4460 tree arg;
4461 rtx op0;
4463 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4464 return NULL_RTX;
4466 arg = CALL_EXPR_ARG (exp, 0);
4467 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4468 mode = TYPE_MODE (TREE_TYPE (arg));
4469 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4470 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4473 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4474 Return NULL is a normal call should be emitted rather than expanding the
4475 function inline. If convenient, the result should be placed in TARGET.
4476 SUBTARGET may be used as the target for computing the operand. */
4478 static rtx
4479 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4481 rtx op0, op1;
4482 tree arg;
4484 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4485 return NULL_RTX;
4487 arg = CALL_EXPR_ARG (exp, 0);
4488 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4490 arg = CALL_EXPR_ARG (exp, 1);
4491 op1 = expand_normal (arg);
4493 return expand_copysign (op0, op1, target);
4496 /* Expand a call to __builtin___clear_cache. */
4498 static rtx
4499 expand_builtin___clear_cache (tree exp)
4501 if (!targetm.code_for_clear_cache)
4503 #ifdef CLEAR_INSN_CACHE
4504 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4505 does something. Just do the default expansion to a call to
4506 __clear_cache(). */
4507 return NULL_RTX;
4508 #else
4509 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4510 does nothing. There is no need to call it. Do nothing. */
4511 return const0_rtx;
4512 #endif /* CLEAR_INSN_CACHE */
4515 /* We have a "clear_cache" insn, and it will handle everything. */
4516 tree begin, end;
4517 rtx begin_rtx, end_rtx;
4519 /* We must not expand to a library call. If we did, any
4520 fallback library function in libgcc that might contain a call to
4521 __builtin___clear_cache() would recurse infinitely. */
4522 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4524 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4525 return const0_rtx;
4528 if (targetm.have_clear_cache ())
4530 struct expand_operand ops[2];
4532 begin = CALL_EXPR_ARG (exp, 0);
4533 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4535 end = CALL_EXPR_ARG (exp, 1);
4536 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4538 create_address_operand (&ops[0], begin_rtx);
4539 create_address_operand (&ops[1], end_rtx);
4540 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4541 return const0_rtx;
4543 return const0_rtx;
4546 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4548 static rtx
4549 round_trampoline_addr (rtx tramp)
4551 rtx temp, addend, mask;
4553 /* If we don't need too much alignment, we'll have been guaranteed
4554 proper alignment by get_trampoline_type. */
4555 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4556 return tramp;
4558 /* Round address up to desired boundary. */
4559 temp = gen_reg_rtx (Pmode);
4560 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4561 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4563 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4564 temp, 0, OPTAB_LIB_WIDEN);
4565 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4566 temp, 0, OPTAB_LIB_WIDEN);
4568 return tramp;
4571 static rtx
4572 expand_builtin_init_trampoline (tree exp, bool onstack)
4574 tree t_tramp, t_func, t_chain;
4575 rtx m_tramp, r_tramp, r_chain, tmp;
4577 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4578 POINTER_TYPE, VOID_TYPE))
4579 return NULL_RTX;
4581 t_tramp = CALL_EXPR_ARG (exp, 0);
4582 t_func = CALL_EXPR_ARG (exp, 1);
4583 t_chain = CALL_EXPR_ARG (exp, 2);
4585 r_tramp = expand_normal (t_tramp);
4586 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4587 MEM_NOTRAP_P (m_tramp) = 1;
4589 /* If ONSTACK, the TRAMP argument should be the address of a field
4590 within the local function's FRAME decl. Either way, let's see if
4591 we can fill in the MEM_ATTRs for this memory. */
4592 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4593 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4595 /* Creator of a heap trampoline is responsible for making sure the
4596 address is aligned to at least STACK_BOUNDARY. Normally malloc
4597 will ensure this anyhow. */
4598 tmp = round_trampoline_addr (r_tramp);
4599 if (tmp != r_tramp)
4601 m_tramp = change_address (m_tramp, BLKmode, tmp);
4602 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4603 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4606 /* The FUNC argument should be the address of the nested function.
4607 Extract the actual function decl to pass to the hook. */
4608 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4609 t_func = TREE_OPERAND (t_func, 0);
4610 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4612 r_chain = expand_normal (t_chain);
4614 /* Generate insns to initialize the trampoline. */
4615 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4617 if (onstack)
4619 trampolines_created = 1;
4621 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4622 "trampoline generated for nested function %qD", t_func);
4625 return const0_rtx;
4628 static rtx
4629 expand_builtin_adjust_trampoline (tree exp)
4631 rtx tramp;
4633 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4634 return NULL_RTX;
4636 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4637 tramp = round_trampoline_addr (tramp);
4638 if (targetm.calls.trampoline_adjust_address)
4639 tramp = targetm.calls.trampoline_adjust_address (tramp);
4641 return tramp;
4644 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4645 function. The function first checks whether the back end provides
4646 an insn to implement signbit for the respective mode. If not, it
4647 checks whether the floating point format of the value is such that
4648 the sign bit can be extracted. If that is not the case, error out.
4649 EXP is the expression that is a call to the builtin function; if
4650 convenient, the result should be placed in TARGET. */
4651 static rtx
4652 expand_builtin_signbit (tree exp, rtx target)
4654 const struct real_format *fmt;
4655 machine_mode fmode, imode, rmode;
4656 tree arg;
4657 int word, bitpos;
4658 enum insn_code icode;
4659 rtx temp;
4660 location_t loc = EXPR_LOCATION (exp);
4662 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4663 return NULL_RTX;
4665 arg = CALL_EXPR_ARG (exp, 0);
4666 fmode = TYPE_MODE (TREE_TYPE (arg));
4667 rmode = TYPE_MODE (TREE_TYPE (exp));
4668 fmt = REAL_MODE_FORMAT (fmode);
4670 arg = builtin_save_expr (arg);
4672 /* Expand the argument yielding a RTX expression. */
4673 temp = expand_normal (arg);
4675 /* Check if the back end provides an insn that handles signbit for the
4676 argument's mode. */
4677 icode = optab_handler (signbit_optab, fmode);
4678 if (icode != CODE_FOR_nothing)
4680 rtx_insn *last = get_last_insn ();
4681 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4682 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4683 return target;
4684 delete_insns_since (last);
4687 /* For floating point formats without a sign bit, implement signbit
4688 as "ARG < 0.0". */
4689 bitpos = fmt->signbit_ro;
4690 if (bitpos < 0)
4692 /* But we can't do this if the format supports signed zero. */
4693 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4695 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4696 build_real (TREE_TYPE (arg), dconst0));
4697 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4700 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4702 imode = int_mode_for_mode (fmode);
4703 gcc_assert (imode != BLKmode);
4704 temp = gen_lowpart (imode, temp);
4706 else
4708 imode = word_mode;
4709 /* Handle targets with different FP word orders. */
4710 if (FLOAT_WORDS_BIG_ENDIAN)
4711 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4712 else
4713 word = bitpos / BITS_PER_WORD;
4714 temp = operand_subword_force (temp, word, fmode);
4715 bitpos = bitpos % BITS_PER_WORD;
4718 /* Force the intermediate word_mode (or narrower) result into a
4719 register. This avoids attempting to create paradoxical SUBREGs
4720 of floating point modes below. */
4721 temp = force_reg (imode, temp);
4723 /* If the bitpos is within the "result mode" lowpart, the operation
4724 can be implement with a single bitwise AND. Otherwise, we need
4725 a right shift and an AND. */
4727 if (bitpos < GET_MODE_BITSIZE (rmode))
4729 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4731 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4732 temp = gen_lowpart (rmode, temp);
4733 temp = expand_binop (rmode, and_optab, temp,
4734 immed_wide_int_const (mask, rmode),
4735 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4737 else
4739 /* Perform a logical right shift to place the signbit in the least
4740 significant bit, then truncate the result to the desired mode
4741 and mask just this bit. */
4742 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4743 temp = gen_lowpart (rmode, temp);
4744 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4745 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4748 return temp;
4751 /* Expand fork or exec calls. TARGET is the desired target of the
4752 call. EXP is the call. FN is the
4753 identificator of the actual function. IGNORE is nonzero if the
4754 value is to be ignored. */
4756 static rtx
4757 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4759 tree id, decl;
4760 tree call;
4762 /* If we are not profiling, just call the function. */
4763 if (!profile_arc_flag)
4764 return NULL_RTX;
4766 /* Otherwise call the wrapper. This should be equivalent for the rest of
4767 compiler, so the code does not diverge, and the wrapper may run the
4768 code necessary for keeping the profiling sane. */
4770 switch (DECL_FUNCTION_CODE (fn))
4772 case BUILT_IN_FORK:
4773 id = get_identifier ("__gcov_fork");
4774 break;
4776 case BUILT_IN_EXECL:
4777 id = get_identifier ("__gcov_execl");
4778 break;
4780 case BUILT_IN_EXECV:
4781 id = get_identifier ("__gcov_execv");
4782 break;
4784 case BUILT_IN_EXECLP:
4785 id = get_identifier ("__gcov_execlp");
4786 break;
4788 case BUILT_IN_EXECLE:
4789 id = get_identifier ("__gcov_execle");
4790 break;
4792 case BUILT_IN_EXECVP:
4793 id = get_identifier ("__gcov_execvp");
4794 break;
4796 case BUILT_IN_EXECVE:
4797 id = get_identifier ("__gcov_execve");
4798 break;
4800 default:
4801 gcc_unreachable ();
4804 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4805 FUNCTION_DECL, id, TREE_TYPE (fn));
4806 DECL_EXTERNAL (decl) = 1;
4807 TREE_PUBLIC (decl) = 1;
4808 DECL_ARTIFICIAL (decl) = 1;
4809 TREE_NOTHROW (decl) = 1;
4810 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4811 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4812 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4813 return expand_call (call, target, ignore);
4818 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4819 the pointer in these functions is void*, the tree optimizers may remove
4820 casts. The mode computed in expand_builtin isn't reliable either, due
4821 to __sync_bool_compare_and_swap.
4823 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4824 group of builtins. This gives us log2 of the mode size. */
4826 static inline machine_mode
4827 get_builtin_sync_mode (int fcode_diff)
4829 /* The size is not negotiable, so ask not to get BLKmode in return
4830 if the target indicates that a smaller size would be better. */
4831 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4834 /* Expand the memory expression LOC and return the appropriate memory operand
4835 for the builtin_sync operations. */
4837 static rtx
4838 get_builtin_sync_mem (tree loc, machine_mode mode)
4840 rtx addr, mem;
4842 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4843 addr = convert_memory_address (Pmode, addr);
4845 /* Note that we explicitly do not want any alias information for this
4846 memory, so that we kill all other live memories. Otherwise we don't
4847 satisfy the full barrier semantics of the intrinsic. */
4848 mem = validize_mem (gen_rtx_MEM (mode, addr));
4850 /* The alignment needs to be at least according to that of the mode. */
4851 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4852 get_pointer_alignment (loc)));
4853 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4854 MEM_VOLATILE_P (mem) = 1;
4856 return mem;
4859 /* Make sure an argument is in the right mode.
4860 EXP is the tree argument.
4861 MODE is the mode it should be in. */
4863 static rtx
4864 expand_expr_force_mode (tree exp, machine_mode mode)
4866 rtx val;
4867 machine_mode old_mode;
4869 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4870 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4871 of CONST_INTs, where we know the old_mode only from the call argument. */
4873 old_mode = GET_MODE (val);
4874 if (old_mode == VOIDmode)
4875 old_mode = TYPE_MODE (TREE_TYPE (exp));
4876 val = convert_modes (mode, old_mode, val, 1);
4877 return val;
4881 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4882 EXP is the CALL_EXPR. CODE is the rtx code
4883 that corresponds to the arithmetic or logical operation from the name;
4884 an exception here is that NOT actually means NAND. TARGET is an optional
4885 place for us to store the results; AFTER is true if this is the
4886 fetch_and_xxx form. */
4888 static rtx
4889 expand_builtin_sync_operation (machine_mode mode, tree exp,
4890 enum rtx_code code, bool after,
4891 rtx target)
4893 rtx val, mem;
4894 location_t loc = EXPR_LOCATION (exp);
4896 if (code == NOT && warn_sync_nand)
4898 tree fndecl = get_callee_fndecl (exp);
4899 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4901 static bool warned_f_a_n, warned_n_a_f;
4903 switch (fcode)
4905 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4906 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4907 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4908 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4909 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4910 if (warned_f_a_n)
4911 break;
4913 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4914 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4915 warned_f_a_n = true;
4916 break;
4918 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4919 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4920 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4921 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4922 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4923 if (warned_n_a_f)
4924 break;
4926 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4927 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4928 warned_n_a_f = true;
4929 break;
4931 default:
4932 gcc_unreachable ();
4936 /* Expand the operands. */
4937 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4938 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4940 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4941 after);
4944 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4945 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4946 true if this is the boolean form. TARGET is a place for us to store the
4947 results; this is NOT optional if IS_BOOL is true. */
4949 static rtx
4950 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4951 bool is_bool, rtx target)
4953 rtx old_val, new_val, mem;
4954 rtx *pbool, *poval;
4956 /* Expand the operands. */
4957 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4958 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4959 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4961 pbool = poval = NULL;
4962 if (target != const0_rtx)
4964 if (is_bool)
4965 pbool = &target;
4966 else
4967 poval = &target;
4969 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4970 false, MEMMODEL_SYNC_SEQ_CST,
4971 MEMMODEL_SYNC_SEQ_CST))
4972 return NULL_RTX;
4974 return target;
4977 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4978 general form is actually an atomic exchange, and some targets only
4979 support a reduced form with the second argument being a constant 1.
4980 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4981 the results. */
4983 static rtx
4984 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4985 rtx target)
4987 rtx val, mem;
4989 /* Expand the operands. */
4990 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4991 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4993 return expand_sync_lock_test_and_set (target, mem, val);
4996 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4998 static void
4999 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5001 rtx mem;
5003 /* Expand the operands. */
5004 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5006 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5009 /* Given an integer representing an ``enum memmodel'', verify its
5010 correctness and return the memory model enum. */
5012 static enum memmodel
5013 get_memmodel (tree exp)
5015 rtx op;
5016 unsigned HOST_WIDE_INT val;
5017 source_location loc
5018 = expansion_point_location_if_in_system_header (input_location);
5020 /* If the parameter is not a constant, it's a run time value so we'll just
5021 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5022 if (TREE_CODE (exp) != INTEGER_CST)
5023 return MEMMODEL_SEQ_CST;
5025 op = expand_normal (exp);
5027 val = INTVAL (op);
5028 if (targetm.memmodel_check)
5029 val = targetm.memmodel_check (val);
5030 else if (val & ~MEMMODEL_MASK)
5032 warning_at (loc, OPT_Winvalid_memory_model,
5033 "unknown architecture specifier in memory model to builtin");
5034 return MEMMODEL_SEQ_CST;
5037 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5038 if (memmodel_base (val) >= MEMMODEL_LAST)
5040 warning_at (loc, OPT_Winvalid_memory_model,
5041 "invalid memory model argument to builtin");
5042 return MEMMODEL_SEQ_CST;
5045 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5046 be conservative and promote consume to acquire. */
5047 if (val == MEMMODEL_CONSUME)
5048 val = MEMMODEL_ACQUIRE;
5050 return (enum memmodel) val;
5053 /* Expand the __atomic_exchange intrinsic:
5054 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5055 EXP is the CALL_EXPR.
5056 TARGET is an optional place for us to store the results. */
5058 static rtx
5059 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5061 rtx val, mem;
5062 enum memmodel model;
5064 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5066 if (!flag_inline_atomics)
5067 return NULL_RTX;
5069 /* Expand the operands. */
5070 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5071 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5073 return expand_atomic_exchange (target, mem, val, model);
5076 /* Expand the __atomic_compare_exchange intrinsic:
5077 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5078 TYPE desired, BOOL weak,
5079 enum memmodel success,
5080 enum memmodel failure)
5081 EXP is the CALL_EXPR.
5082 TARGET is an optional place for us to store the results. */
5084 static rtx
5085 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5086 rtx target)
5088 rtx expect, desired, mem, oldval;
5089 rtx_code_label *label;
5090 enum memmodel success, failure;
5091 tree weak;
5092 bool is_weak;
5093 source_location loc
5094 = expansion_point_location_if_in_system_header (input_location);
5096 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5097 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5099 if (failure > success)
5101 warning_at (loc, OPT_Winvalid_memory_model,
5102 "failure memory model cannot be stronger than success "
5103 "memory model for %<__atomic_compare_exchange%>");
5104 success = MEMMODEL_SEQ_CST;
5107 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5109 warning_at (loc, OPT_Winvalid_memory_model,
5110 "invalid failure memory model for "
5111 "%<__atomic_compare_exchange%>");
5112 failure = MEMMODEL_SEQ_CST;
5113 success = MEMMODEL_SEQ_CST;
5117 if (!flag_inline_atomics)
5118 return NULL_RTX;
5120 /* Expand the operands. */
5121 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5123 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5124 expect = convert_memory_address (Pmode, expect);
5125 expect = gen_rtx_MEM (mode, expect);
5126 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5128 weak = CALL_EXPR_ARG (exp, 3);
5129 is_weak = false;
5130 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5131 is_weak = true;
5133 if (target == const0_rtx)
5134 target = NULL;
5136 /* Lest the rtl backend create a race condition with an imporoper store
5137 to memory, always create a new pseudo for OLDVAL. */
5138 oldval = NULL;
5140 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5141 is_weak, success, failure))
5142 return NULL_RTX;
5144 /* Conditionally store back to EXPECT, lest we create a race condition
5145 with an improper store to memory. */
5146 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5147 the normal case where EXPECT is totally private, i.e. a register. At
5148 which point the store can be unconditional. */
5149 label = gen_label_rtx ();
5150 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5151 GET_MODE (target), 1, label);
5152 emit_move_insn (expect, oldval);
5153 emit_label (label);
5155 return target;
5158 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5159 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5160 call. The weak parameter must be dropped to match the expected parameter
5161 list and the expected argument changed from value to pointer to memory
5162 slot. */
5164 static void
5165 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5167 unsigned int z;
5168 vec<tree, va_gc> *vec;
5170 vec_alloc (vec, 5);
5171 vec->quick_push (gimple_call_arg (call, 0));
5172 tree expected = gimple_call_arg (call, 1);
5173 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5174 TREE_TYPE (expected));
5175 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5176 if (expd != x)
5177 emit_move_insn (x, expd);
5178 tree v = make_tree (TREE_TYPE (expected), x);
5179 vec->quick_push (build1 (ADDR_EXPR,
5180 build_pointer_type (TREE_TYPE (expected)), v));
5181 vec->quick_push (gimple_call_arg (call, 2));
5182 /* Skip the boolean weak parameter. */
5183 for (z = 4; z < 6; z++)
5184 vec->quick_push (gimple_call_arg (call, z));
5185 built_in_function fncode
5186 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5187 + exact_log2 (GET_MODE_SIZE (mode)));
5188 tree fndecl = builtin_decl_explicit (fncode);
5189 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5190 fndecl);
5191 tree exp = build_call_vec (boolean_type_node, fn, vec);
5192 tree lhs = gimple_call_lhs (call);
5193 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5194 if (lhs)
5196 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5197 if (GET_MODE (boolret) != mode)
5198 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5199 x = force_reg (mode, x);
5200 write_complex_part (target, boolret, true);
5201 write_complex_part (target, x, false);
5205 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5207 void
5208 expand_ifn_atomic_compare_exchange (gcall *call)
5210 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5211 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5212 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5213 rtx expect, desired, mem, oldval, boolret;
5214 enum memmodel success, failure;
5215 tree lhs;
5216 bool is_weak;
5217 source_location loc
5218 = expansion_point_location_if_in_system_header (gimple_location (call));
5220 success = get_memmodel (gimple_call_arg (call, 4));
5221 failure = get_memmodel (gimple_call_arg (call, 5));
5223 if (failure > success)
5225 warning_at (loc, OPT_Winvalid_memory_model,
5226 "failure memory model cannot be stronger than success "
5227 "memory model for %<__atomic_compare_exchange%>");
5228 success = MEMMODEL_SEQ_CST;
5231 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5233 warning_at (loc, OPT_Winvalid_memory_model,
5234 "invalid failure memory model for "
5235 "%<__atomic_compare_exchange%>");
5236 failure = MEMMODEL_SEQ_CST;
5237 success = MEMMODEL_SEQ_CST;
5240 if (!flag_inline_atomics)
5242 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5243 return;
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5249 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5250 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5252 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5254 boolret = NULL;
5255 oldval = NULL;
5257 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5258 is_weak, success, failure))
5260 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5261 return;
5264 lhs = gimple_call_lhs (call);
5265 if (lhs)
5267 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5268 if (GET_MODE (boolret) != mode)
5269 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5270 write_complex_part (target, boolret, true);
5271 write_complex_part (target, oldval, false);
5275 /* Expand the __atomic_load intrinsic:
5276 TYPE __atomic_load (TYPE *object, enum memmodel)
5277 EXP is the CALL_EXPR.
5278 TARGET is an optional place for us to store the results. */
5280 static rtx
5281 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5283 rtx mem;
5284 enum memmodel model;
5286 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5287 if (is_mm_release (model) || is_mm_acq_rel (model))
5289 source_location loc
5290 = expansion_point_location_if_in_system_header (input_location);
5291 warning_at (loc, OPT_Winvalid_memory_model,
5292 "invalid memory model for %<__atomic_load%>");
5293 model = MEMMODEL_SEQ_CST;
5296 if (!flag_inline_atomics)
5297 return NULL_RTX;
5299 /* Expand the operand. */
5300 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5302 return expand_atomic_load (target, mem, model);
5306 /* Expand the __atomic_store intrinsic:
5307 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5308 EXP is the CALL_EXPR.
5309 TARGET is an optional place for us to store the results. */
5311 static rtx
5312 expand_builtin_atomic_store (machine_mode mode, tree exp)
5314 rtx mem, val;
5315 enum memmodel model;
5317 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5318 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5319 || is_mm_release (model)))
5321 source_location loc
5322 = expansion_point_location_if_in_system_header (input_location);
5323 warning_at (loc, OPT_Winvalid_memory_model,
5324 "invalid memory model for %<__atomic_store%>");
5325 model = MEMMODEL_SEQ_CST;
5328 if (!flag_inline_atomics)
5329 return NULL_RTX;
5331 /* Expand the operands. */
5332 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5333 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5335 return expand_atomic_store (mem, val, model, false);
5338 /* Expand the __atomic_fetch_XXX intrinsic:
5339 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5340 EXP is the CALL_EXPR.
5341 TARGET is an optional place for us to store the results.
5342 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5343 FETCH_AFTER is true if returning the result of the operation.
5344 FETCH_AFTER is false if returning the value before the operation.
5345 IGNORE is true if the result is not used.
5346 EXT_CALL is the correct builtin for an external call if this cannot be
5347 resolved to an instruction sequence. */
5349 static rtx
5350 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5351 enum rtx_code code, bool fetch_after,
5352 bool ignore, enum built_in_function ext_call)
5354 rtx val, mem, ret;
5355 enum memmodel model;
5356 tree fndecl;
5357 tree addr;
5359 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5361 /* Expand the operands. */
5362 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5363 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5365 /* Only try generating instructions if inlining is turned on. */
5366 if (flag_inline_atomics)
5368 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5369 if (ret)
5370 return ret;
5373 /* Return if a different routine isn't needed for the library call. */
5374 if (ext_call == BUILT_IN_NONE)
5375 return NULL_RTX;
5377 /* Change the call to the specified function. */
5378 fndecl = get_callee_fndecl (exp);
5379 addr = CALL_EXPR_FN (exp);
5380 STRIP_NOPS (addr);
5382 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5383 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5385 /* Expand the call here so we can emit trailing code. */
5386 ret = expand_call (exp, target, ignore);
5388 /* Replace the original function just in case it matters. */
5389 TREE_OPERAND (addr, 0) = fndecl;
5391 /* Then issue the arithmetic correction to return the right result. */
5392 if (!ignore)
5394 if (code == NOT)
5396 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5397 OPTAB_LIB_WIDEN);
5398 ret = expand_simple_unop (mode, NOT, ret, target, true);
5400 else
5401 ret = expand_simple_binop (mode, code, ret, val, target, true,
5402 OPTAB_LIB_WIDEN);
5404 return ret;
5407 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5409 void
5410 expand_ifn_atomic_bit_test_and (gcall *call)
5412 tree ptr = gimple_call_arg (call, 0);
5413 tree bit = gimple_call_arg (call, 1);
5414 tree flag = gimple_call_arg (call, 2);
5415 tree lhs = gimple_call_lhs (call);
5416 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5417 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5418 enum rtx_code code;
5419 optab optab;
5420 struct expand_operand ops[5];
5422 gcc_assert (flag_inline_atomics);
5424 if (gimple_call_num_args (call) == 4)
5425 model = get_memmodel (gimple_call_arg (call, 3));
5427 rtx mem = get_builtin_sync_mem (ptr, mode);
5428 rtx val = expand_expr_force_mode (bit, mode);
5430 switch (gimple_call_internal_fn (call))
5432 case IFN_ATOMIC_BIT_TEST_AND_SET:
5433 code = IOR;
5434 optab = atomic_bit_test_and_set_optab;
5435 break;
5436 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5437 code = XOR;
5438 optab = atomic_bit_test_and_complement_optab;
5439 break;
5440 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5441 code = AND;
5442 optab = atomic_bit_test_and_reset_optab;
5443 break;
5444 default:
5445 gcc_unreachable ();
5448 if (lhs == NULL_TREE)
5450 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5451 val, NULL_RTX, true, OPTAB_DIRECT);
5452 if (code == AND)
5453 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5454 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5455 return;
5458 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5459 enum insn_code icode = direct_optab_handler (optab, mode);
5460 gcc_assert (icode != CODE_FOR_nothing);
5461 create_output_operand (&ops[0], target, mode);
5462 create_fixed_operand (&ops[1], mem);
5463 create_convert_operand_to (&ops[2], val, mode, true);
5464 create_integer_operand (&ops[3], model);
5465 create_integer_operand (&ops[4], integer_onep (flag));
5466 if (maybe_expand_insn (icode, 5, ops))
5467 return;
5469 rtx bitval = val;
5470 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5471 val, NULL_RTX, true, OPTAB_DIRECT);
5472 rtx maskval = val;
5473 if (code == AND)
5474 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5475 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5476 code, model, false);
5477 if (integer_onep (flag))
5479 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5480 NULL_RTX, true, OPTAB_DIRECT);
5481 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5482 true, OPTAB_DIRECT);
5484 else
5485 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5486 OPTAB_DIRECT);
5487 if (result != target)
5488 emit_move_insn (target, result);
5491 /* Expand an atomic clear operation.
5492 void _atomic_clear (BOOL *obj, enum memmodel)
5493 EXP is the call expression. */
5495 static rtx
5496 expand_builtin_atomic_clear (tree exp)
5498 machine_mode mode;
5499 rtx mem, ret;
5500 enum memmodel model;
5502 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5503 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5504 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5506 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5508 source_location loc
5509 = expansion_point_location_if_in_system_header (input_location);
5510 warning_at (loc, OPT_Winvalid_memory_model,
5511 "invalid memory model for %<__atomic_store%>");
5512 model = MEMMODEL_SEQ_CST;
5515 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5516 Failing that, a store is issued by __atomic_store. The only way this can
5517 fail is if the bool type is larger than a word size. Unlikely, but
5518 handle it anyway for completeness. Assume a single threaded model since
5519 there is no atomic support in this case, and no barriers are required. */
5520 ret = expand_atomic_store (mem, const0_rtx, model, true);
5521 if (!ret)
5522 emit_move_insn (mem, const0_rtx);
5523 return const0_rtx;
5526 /* Expand an atomic test_and_set operation.
5527 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5528 EXP is the call expression. */
5530 static rtx
5531 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5533 rtx mem;
5534 enum memmodel model;
5535 machine_mode mode;
5537 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5538 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5539 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5541 return expand_atomic_test_and_set (target, mem, model);
5545 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5546 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5548 static tree
5549 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5551 int size;
5552 machine_mode mode;
5553 unsigned int mode_align, type_align;
5555 if (TREE_CODE (arg0) != INTEGER_CST)
5556 return NULL_TREE;
5558 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5559 mode = mode_for_size (size, MODE_INT, 0);
5560 mode_align = GET_MODE_ALIGNMENT (mode);
5562 if (TREE_CODE (arg1) == INTEGER_CST)
5564 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5566 /* Either this argument is null, or it's a fake pointer encoding
5567 the alignment of the object. */
5568 val = val & -val;
5569 val *= BITS_PER_UNIT;
5571 if (val == 0 || mode_align < val)
5572 type_align = mode_align;
5573 else
5574 type_align = val;
5576 else
5578 tree ttype = TREE_TYPE (arg1);
5580 /* This function is usually invoked and folded immediately by the front
5581 end before anything else has a chance to look at it. The pointer
5582 parameter at this point is usually cast to a void *, so check for that
5583 and look past the cast. */
5584 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5585 && VOID_TYPE_P (TREE_TYPE (ttype)))
5586 arg1 = TREE_OPERAND (arg1, 0);
5588 ttype = TREE_TYPE (arg1);
5589 gcc_assert (POINTER_TYPE_P (ttype));
5591 /* Get the underlying type of the object. */
5592 ttype = TREE_TYPE (ttype);
5593 type_align = TYPE_ALIGN (ttype);
5596 /* If the object has smaller alignment, the lock free routines cannot
5597 be used. */
5598 if (type_align < mode_align)
5599 return boolean_false_node;
5601 /* Check if a compare_and_swap pattern exists for the mode which represents
5602 the required size. The pattern is not allowed to fail, so the existence
5603 of the pattern indicates support is present. */
5604 if (can_compare_and_swap_p (mode, true))
5605 return boolean_true_node;
5606 else
5607 return boolean_false_node;
5610 /* Return true if the parameters to call EXP represent an object which will
5611 always generate lock free instructions. The first argument represents the
5612 size of the object, and the second parameter is a pointer to the object
5613 itself. If NULL is passed for the object, then the result is based on
5614 typical alignment for an object of the specified size. Otherwise return
5615 false. */
5617 static rtx
5618 expand_builtin_atomic_always_lock_free (tree exp)
5620 tree size;
5621 tree arg0 = CALL_EXPR_ARG (exp, 0);
5622 tree arg1 = CALL_EXPR_ARG (exp, 1);
5624 if (TREE_CODE (arg0) != INTEGER_CST)
5626 error ("non-constant argument 1 to __atomic_always_lock_free");
5627 return const0_rtx;
5630 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5631 if (size == boolean_true_node)
5632 return const1_rtx;
5633 return const0_rtx;
5636 /* Return a one or zero if it can be determined that object ARG1 of size ARG
5637 is lock free on this architecture. */
5639 static tree
5640 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5642 if (!flag_inline_atomics)
5643 return NULL_TREE;
5645 /* If it isn't always lock free, don't generate a result. */
5646 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5647 return boolean_true_node;
5649 return NULL_TREE;
5652 /* Return true if the parameters to call EXP represent an object which will
5653 always generate lock free instructions. The first argument represents the
5654 size of the object, and the second parameter is a pointer to the object
5655 itself. If NULL is passed for the object, then the result is based on
5656 typical alignment for an object of the specified size. Otherwise return
5657 NULL*/
5659 static rtx
5660 expand_builtin_atomic_is_lock_free (tree exp)
5662 tree size;
5663 tree arg0 = CALL_EXPR_ARG (exp, 0);
5664 tree arg1 = CALL_EXPR_ARG (exp, 1);
5666 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5668 error ("non-integer argument 1 to __atomic_is_lock_free");
5669 return NULL_RTX;
5672 if (!flag_inline_atomics)
5673 return NULL_RTX;
5675 /* If the value is known at compile time, return the RTX for it. */
5676 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5677 if (size == boolean_true_node)
5678 return const1_rtx;
5680 return NULL_RTX;
5683 /* Expand the __atomic_thread_fence intrinsic:
5684 void __atomic_thread_fence (enum memmodel)
5685 EXP is the CALL_EXPR. */
5687 static void
5688 expand_builtin_atomic_thread_fence (tree exp)
5690 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5691 expand_mem_thread_fence (model);
5694 /* Expand the __atomic_signal_fence intrinsic:
5695 void __atomic_signal_fence (enum memmodel)
5696 EXP is the CALL_EXPR. */
5698 static void
5699 expand_builtin_atomic_signal_fence (tree exp)
5701 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5702 expand_mem_signal_fence (model);
5705 /* Expand the __sync_synchronize intrinsic. */
5707 static void
5708 expand_builtin_sync_synchronize (void)
5710 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5713 static rtx
5714 expand_builtin_thread_pointer (tree exp, rtx target)
5716 enum insn_code icode;
5717 if (!validate_arglist (exp, VOID_TYPE))
5718 return const0_rtx;
5719 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5720 if (icode != CODE_FOR_nothing)
5722 struct expand_operand op;
5723 /* If the target is not sutitable then create a new target. */
5724 if (target == NULL_RTX
5725 || !REG_P (target)
5726 || GET_MODE (target) != Pmode)
5727 target = gen_reg_rtx (Pmode);
5728 create_output_operand (&op, target, Pmode);
5729 expand_insn (icode, 1, &op);
5730 return target;
5732 error ("__builtin_thread_pointer is not supported on this target");
5733 return const0_rtx;
5736 static void
5737 expand_builtin_set_thread_pointer (tree exp)
5739 enum insn_code icode;
5740 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5741 return;
5742 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5743 if (icode != CODE_FOR_nothing)
5745 struct expand_operand op;
5746 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5747 Pmode, EXPAND_NORMAL);
5748 create_input_operand (&op, val, Pmode);
5749 expand_insn (icode, 1, &op);
5750 return;
5752 error ("__builtin_set_thread_pointer is not supported on this target");
5756 /* Emit code to restore the current value of stack. */
5758 static void
5759 expand_stack_restore (tree var)
5761 rtx_insn *prev;
5762 rtx sa = expand_normal (var);
5764 sa = convert_memory_address (Pmode, sa);
5766 prev = get_last_insn ();
5767 emit_stack_restore (SAVE_BLOCK, sa);
5769 record_new_stack_level ();
5771 fixup_args_size_notes (prev, get_last_insn (), 0);
5774 /* Emit code to save the current value of stack. */
5776 static rtx
5777 expand_stack_save (void)
5779 rtx ret = NULL_RTX;
5781 emit_stack_save (SAVE_BLOCK, &ret);
5782 return ret;
5786 /* Expand an expression EXP that calls a built-in function,
5787 with result going to TARGET if that's convenient
5788 (and in mode MODE if that's convenient).
5789 SUBTARGET may be used as the target for computing one of EXP's operands.
5790 IGNORE is nonzero if the value is to be ignored. */
5793 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5794 int ignore)
5796 tree fndecl = get_callee_fndecl (exp);
5797 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5798 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5799 int flags;
5801 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5802 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5804 /* When ASan is enabled, we don't want to expand some memory/string
5805 builtins and rely on libsanitizer's hooks. This allows us to avoid
5806 redundant checks and be sure, that possible overflow will be detected
5807 by ASan. */
5809 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5810 return expand_call (exp, target, ignore);
5812 /* When not optimizing, generate calls to library functions for a certain
5813 set of builtins. */
5814 if (!optimize
5815 && !called_as_built_in (fndecl)
5816 && fcode != BUILT_IN_FORK
5817 && fcode != BUILT_IN_EXECL
5818 && fcode != BUILT_IN_EXECV
5819 && fcode != BUILT_IN_EXECLP
5820 && fcode != BUILT_IN_EXECLE
5821 && fcode != BUILT_IN_EXECVP
5822 && fcode != BUILT_IN_EXECVE
5823 && fcode != BUILT_IN_ALLOCA
5824 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5825 && fcode != BUILT_IN_FREE
5826 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5830 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5831 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5832 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5833 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5834 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5835 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5836 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5837 && fcode != BUILT_IN_CHKP_BNDRET)
5838 return expand_call (exp, target, ignore);
5840 /* The built-in function expanders test for target == const0_rtx
5841 to determine whether the function's result will be ignored. */
5842 if (ignore)
5843 target = const0_rtx;
5845 /* If the result of a pure or const built-in function is ignored, and
5846 none of its arguments are volatile, we can avoid expanding the
5847 built-in call and just evaluate the arguments for side-effects. */
5848 if (target == const0_rtx
5849 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5850 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5852 bool volatilep = false;
5853 tree arg;
5854 call_expr_arg_iterator iter;
5856 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5857 if (TREE_THIS_VOLATILE (arg))
5859 volatilep = true;
5860 break;
5863 if (! volatilep)
5865 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5866 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5867 return const0_rtx;
5871 /* expand_builtin_with_bounds is supposed to be used for
5872 instrumented builtin calls. */
5873 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5875 switch (fcode)
5877 CASE_FLT_FN (BUILT_IN_FABS):
5878 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5879 case BUILT_IN_FABSD32:
5880 case BUILT_IN_FABSD64:
5881 case BUILT_IN_FABSD128:
5882 target = expand_builtin_fabs (exp, target, subtarget);
5883 if (target)
5884 return target;
5885 break;
5887 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5888 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5889 target = expand_builtin_copysign (exp, target, subtarget);
5890 if (target)
5891 return target;
5892 break;
5894 /* Just do a normal library call if we were unable to fold
5895 the values. */
5896 CASE_FLT_FN (BUILT_IN_CABS):
5897 break;
5899 CASE_FLT_FN (BUILT_IN_FMA):
5900 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5901 if (target)
5902 return target;
5903 break;
5905 CASE_FLT_FN (BUILT_IN_ILOGB):
5906 if (! flag_unsafe_math_optimizations)
5907 break;
5908 CASE_FLT_FN (BUILT_IN_ISINF):
5909 CASE_FLT_FN (BUILT_IN_FINITE):
5910 case BUILT_IN_ISFINITE:
5911 case BUILT_IN_ISNORMAL:
5912 target = expand_builtin_interclass_mathfn (exp, target);
5913 if (target)
5914 return target;
5915 break;
5917 CASE_FLT_FN (BUILT_IN_ICEIL):
5918 CASE_FLT_FN (BUILT_IN_LCEIL):
5919 CASE_FLT_FN (BUILT_IN_LLCEIL):
5920 CASE_FLT_FN (BUILT_IN_LFLOOR):
5921 CASE_FLT_FN (BUILT_IN_IFLOOR):
5922 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5923 target = expand_builtin_int_roundingfn (exp, target);
5924 if (target)
5925 return target;
5926 break;
5928 CASE_FLT_FN (BUILT_IN_IRINT):
5929 CASE_FLT_FN (BUILT_IN_LRINT):
5930 CASE_FLT_FN (BUILT_IN_LLRINT):
5931 CASE_FLT_FN (BUILT_IN_IROUND):
5932 CASE_FLT_FN (BUILT_IN_LROUND):
5933 CASE_FLT_FN (BUILT_IN_LLROUND):
5934 target = expand_builtin_int_roundingfn_2 (exp, target);
5935 if (target)
5936 return target;
5937 break;
5939 CASE_FLT_FN (BUILT_IN_POWI):
5940 target = expand_builtin_powi (exp, target);
5941 if (target)
5942 return target;
5943 break;
5945 CASE_FLT_FN (BUILT_IN_CEXPI):
5946 target = expand_builtin_cexpi (exp, target);
5947 gcc_assert (target);
5948 return target;
5950 CASE_FLT_FN (BUILT_IN_SIN):
5951 CASE_FLT_FN (BUILT_IN_COS):
5952 if (! flag_unsafe_math_optimizations)
5953 break;
5954 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5955 if (target)
5956 return target;
5957 break;
5959 CASE_FLT_FN (BUILT_IN_SINCOS):
5960 if (! flag_unsafe_math_optimizations)
5961 break;
5962 target = expand_builtin_sincos (exp);
5963 if (target)
5964 return target;
5965 break;
5967 case BUILT_IN_APPLY_ARGS:
5968 return expand_builtin_apply_args ();
5970 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5971 FUNCTION with a copy of the parameters described by
5972 ARGUMENTS, and ARGSIZE. It returns a block of memory
5973 allocated on the stack into which is stored all the registers
5974 that might possibly be used for returning the result of a
5975 function. ARGUMENTS is the value returned by
5976 __builtin_apply_args. ARGSIZE is the number of bytes of
5977 arguments that must be copied. ??? How should this value be
5978 computed? We'll also need a safe worst case value for varargs
5979 functions. */
5980 case BUILT_IN_APPLY:
5981 if (!validate_arglist (exp, POINTER_TYPE,
5982 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5983 && !validate_arglist (exp, REFERENCE_TYPE,
5984 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5985 return const0_rtx;
5986 else
5988 rtx ops[3];
5990 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5991 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5992 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5994 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5997 /* __builtin_return (RESULT) causes the function to return the
5998 value described by RESULT. RESULT is address of the block of
5999 memory returned by __builtin_apply. */
6000 case BUILT_IN_RETURN:
6001 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6002 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6003 return const0_rtx;
6005 case BUILT_IN_SAVEREGS:
6006 return expand_builtin_saveregs ();
6008 case BUILT_IN_VA_ARG_PACK:
6009 /* All valid uses of __builtin_va_arg_pack () are removed during
6010 inlining. */
6011 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6012 return const0_rtx;
6014 case BUILT_IN_VA_ARG_PACK_LEN:
6015 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6016 inlining. */
6017 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6018 return const0_rtx;
6020 /* Return the address of the first anonymous stack arg. */
6021 case BUILT_IN_NEXT_ARG:
6022 if (fold_builtin_next_arg (exp, false))
6023 return const0_rtx;
6024 return expand_builtin_next_arg ();
6026 case BUILT_IN_CLEAR_CACHE:
6027 target = expand_builtin___clear_cache (exp);
6028 if (target)
6029 return target;
6030 break;
6032 case BUILT_IN_CLASSIFY_TYPE:
6033 return expand_builtin_classify_type (exp);
6035 case BUILT_IN_CONSTANT_P:
6036 return const0_rtx;
6038 case BUILT_IN_FRAME_ADDRESS:
6039 case BUILT_IN_RETURN_ADDRESS:
6040 return expand_builtin_frame_address (fndecl, exp);
6042 /* Returns the address of the area where the structure is returned.
6043 0 otherwise. */
6044 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6045 if (call_expr_nargs (exp) != 0
6046 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6047 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6048 return const0_rtx;
6049 else
6050 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6052 case BUILT_IN_ALLOCA:
6053 case BUILT_IN_ALLOCA_WITH_ALIGN:
6054 /* If the allocation stems from the declaration of a variable-sized
6055 object, it cannot accumulate. */
6056 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6057 if (target)
6058 return target;
6059 break;
6061 case BUILT_IN_STACK_SAVE:
6062 return expand_stack_save ();
6064 case BUILT_IN_STACK_RESTORE:
6065 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6066 return const0_rtx;
6068 case BUILT_IN_BSWAP16:
6069 case BUILT_IN_BSWAP32:
6070 case BUILT_IN_BSWAP64:
6071 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6072 if (target)
6073 return target;
6074 break;
6076 CASE_INT_FN (BUILT_IN_FFS):
6077 target = expand_builtin_unop (target_mode, exp, target,
6078 subtarget, ffs_optab);
6079 if (target)
6080 return target;
6081 break;
6083 CASE_INT_FN (BUILT_IN_CLZ):
6084 target = expand_builtin_unop (target_mode, exp, target,
6085 subtarget, clz_optab);
6086 if (target)
6087 return target;
6088 break;
6090 CASE_INT_FN (BUILT_IN_CTZ):
6091 target = expand_builtin_unop (target_mode, exp, target,
6092 subtarget, ctz_optab);
6093 if (target)
6094 return target;
6095 break;
6097 CASE_INT_FN (BUILT_IN_CLRSB):
6098 target = expand_builtin_unop (target_mode, exp, target,
6099 subtarget, clrsb_optab);
6100 if (target)
6101 return target;
6102 break;
6104 CASE_INT_FN (BUILT_IN_POPCOUNT):
6105 target = expand_builtin_unop (target_mode, exp, target,
6106 subtarget, popcount_optab);
6107 if (target)
6108 return target;
6109 break;
6111 CASE_INT_FN (BUILT_IN_PARITY):
6112 target = expand_builtin_unop (target_mode, exp, target,
6113 subtarget, parity_optab);
6114 if (target)
6115 return target;
6116 break;
6118 case BUILT_IN_STRLEN:
6119 target = expand_builtin_strlen (exp, target, target_mode);
6120 if (target)
6121 return target;
6122 break;
6124 case BUILT_IN_STRCPY:
6125 target = expand_builtin_strcpy (exp, target);
6126 if (target)
6127 return target;
6128 break;
6130 case BUILT_IN_STRNCPY:
6131 target = expand_builtin_strncpy (exp, target);
6132 if (target)
6133 return target;
6134 break;
6136 case BUILT_IN_STPCPY:
6137 target = expand_builtin_stpcpy (exp, target, mode);
6138 if (target)
6139 return target;
6140 break;
6142 case BUILT_IN_MEMCPY:
6143 target = expand_builtin_memcpy (exp, target);
6144 if (target)
6145 return target;
6146 break;
6148 case BUILT_IN_MEMPCPY:
6149 target = expand_builtin_mempcpy (exp, target, mode);
6150 if (target)
6151 return target;
6152 break;
6154 case BUILT_IN_MEMSET:
6155 target = expand_builtin_memset (exp, target, mode);
6156 if (target)
6157 return target;
6158 break;
6160 case BUILT_IN_BZERO:
6161 target = expand_builtin_bzero (exp);
6162 if (target)
6163 return target;
6164 break;
6166 case BUILT_IN_STRCMP:
6167 target = expand_builtin_strcmp (exp, target);
6168 if (target)
6169 return target;
6170 break;
6172 case BUILT_IN_STRNCMP:
6173 target = expand_builtin_strncmp (exp, target, mode);
6174 if (target)
6175 return target;
6176 break;
6178 case BUILT_IN_BCMP:
6179 case BUILT_IN_MEMCMP:
6180 case BUILT_IN_MEMCMP_EQ:
6181 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6182 if (target)
6183 return target;
6184 if (fcode == BUILT_IN_MEMCMP_EQ)
6186 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6187 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6189 break;
6191 case BUILT_IN_SETJMP:
6192 /* This should have been lowered to the builtins below. */
6193 gcc_unreachable ();
6195 case BUILT_IN_SETJMP_SETUP:
6196 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6197 and the receiver label. */
6198 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6200 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6201 VOIDmode, EXPAND_NORMAL);
6202 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6203 rtx_insn *label_r = label_rtx (label);
6205 /* This is copied from the handling of non-local gotos. */
6206 expand_builtin_setjmp_setup (buf_addr, label_r);
6207 nonlocal_goto_handler_labels
6208 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6209 nonlocal_goto_handler_labels);
6210 /* ??? Do not let expand_label treat us as such since we would
6211 not want to be both on the list of non-local labels and on
6212 the list of forced labels. */
6213 FORCED_LABEL (label) = 0;
6214 return const0_rtx;
6216 break;
6218 case BUILT_IN_SETJMP_RECEIVER:
6219 /* __builtin_setjmp_receiver is passed the receiver label. */
6220 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6222 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6223 rtx_insn *label_r = label_rtx (label);
6225 expand_builtin_setjmp_receiver (label_r);
6226 return const0_rtx;
6228 break;
6230 /* __builtin_longjmp is passed a pointer to an array of five words.
6231 It's similar to the C library longjmp function but works with
6232 __builtin_setjmp above. */
6233 case BUILT_IN_LONGJMP:
6234 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6236 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6237 VOIDmode, EXPAND_NORMAL);
6238 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6240 if (value != const1_rtx)
6242 error ("%<__builtin_longjmp%> second argument must be 1");
6243 return const0_rtx;
6246 expand_builtin_longjmp (buf_addr, value);
6247 return const0_rtx;
6249 break;
6251 case BUILT_IN_NONLOCAL_GOTO:
6252 target = expand_builtin_nonlocal_goto (exp);
6253 if (target)
6254 return target;
6255 break;
6257 /* This updates the setjmp buffer that is its argument with the value
6258 of the current stack pointer. */
6259 case BUILT_IN_UPDATE_SETJMP_BUF:
6260 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6262 rtx buf_addr
6263 = expand_normal (CALL_EXPR_ARG (exp, 0));
6265 expand_builtin_update_setjmp_buf (buf_addr);
6266 return const0_rtx;
6268 break;
6270 case BUILT_IN_TRAP:
6271 expand_builtin_trap ();
6272 return const0_rtx;
6274 case BUILT_IN_UNREACHABLE:
6275 expand_builtin_unreachable ();
6276 return const0_rtx;
6278 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6279 case BUILT_IN_SIGNBITD32:
6280 case BUILT_IN_SIGNBITD64:
6281 case BUILT_IN_SIGNBITD128:
6282 target = expand_builtin_signbit (exp, target);
6283 if (target)
6284 return target;
6285 break;
6287 /* Various hooks for the DWARF 2 __throw routine. */
6288 case BUILT_IN_UNWIND_INIT:
6289 expand_builtin_unwind_init ();
6290 return const0_rtx;
6291 case BUILT_IN_DWARF_CFA:
6292 return virtual_cfa_rtx;
6293 #ifdef DWARF2_UNWIND_INFO
6294 case BUILT_IN_DWARF_SP_COLUMN:
6295 return expand_builtin_dwarf_sp_column ();
6296 case BUILT_IN_INIT_DWARF_REG_SIZES:
6297 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6298 return const0_rtx;
6299 #endif
6300 case BUILT_IN_FROB_RETURN_ADDR:
6301 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6302 case BUILT_IN_EXTRACT_RETURN_ADDR:
6303 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6304 case BUILT_IN_EH_RETURN:
6305 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6306 CALL_EXPR_ARG (exp, 1));
6307 return const0_rtx;
6308 case BUILT_IN_EH_RETURN_DATA_REGNO:
6309 return expand_builtin_eh_return_data_regno (exp);
6310 case BUILT_IN_EXTEND_POINTER:
6311 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6312 case BUILT_IN_EH_POINTER:
6313 return expand_builtin_eh_pointer (exp);
6314 case BUILT_IN_EH_FILTER:
6315 return expand_builtin_eh_filter (exp);
6316 case BUILT_IN_EH_COPY_VALUES:
6317 return expand_builtin_eh_copy_values (exp);
6319 case BUILT_IN_VA_START:
6320 return expand_builtin_va_start (exp);
6321 case BUILT_IN_VA_END:
6322 return expand_builtin_va_end (exp);
6323 case BUILT_IN_VA_COPY:
6324 return expand_builtin_va_copy (exp);
6325 case BUILT_IN_EXPECT:
6326 return expand_builtin_expect (exp, target);
6327 case BUILT_IN_ASSUME_ALIGNED:
6328 return expand_builtin_assume_aligned (exp, target);
6329 case BUILT_IN_PREFETCH:
6330 expand_builtin_prefetch (exp);
6331 return const0_rtx;
6333 case BUILT_IN_INIT_TRAMPOLINE:
6334 return expand_builtin_init_trampoline (exp, true);
6335 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6336 return expand_builtin_init_trampoline (exp, false);
6337 case BUILT_IN_ADJUST_TRAMPOLINE:
6338 return expand_builtin_adjust_trampoline (exp);
6340 case BUILT_IN_FORK:
6341 case BUILT_IN_EXECL:
6342 case BUILT_IN_EXECV:
6343 case BUILT_IN_EXECLP:
6344 case BUILT_IN_EXECLE:
6345 case BUILT_IN_EXECVP:
6346 case BUILT_IN_EXECVE:
6347 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6348 if (target)
6349 return target;
6350 break;
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6354 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6355 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6356 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6357 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6358 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6359 if (target)
6360 return target;
6361 break;
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6365 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6366 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6367 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6368 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6369 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6370 if (target)
6371 return target;
6372 break;
6374 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6375 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6376 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6377 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6378 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6379 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6380 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6381 if (target)
6382 return target;
6383 break;
6385 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6386 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6387 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6388 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6389 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6390 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6391 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6392 if (target)
6393 return target;
6394 break;
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6398 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6399 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6400 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6401 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6402 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6403 if (target)
6404 return target;
6405 break;
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6409 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6410 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6411 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6412 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6413 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6414 if (target)
6415 return target;
6416 break;
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6420 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6421 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6422 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6423 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6424 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6425 if (target)
6426 return target;
6427 break;
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6431 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6432 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6433 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6434 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6435 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6436 if (target)
6437 return target;
6438 break;
6440 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6441 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6442 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6443 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6444 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6445 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6446 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6447 if (target)
6448 return target;
6449 break;
6451 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6452 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6453 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6454 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6455 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6456 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6457 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6458 if (target)
6459 return target;
6460 break;
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6464 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6465 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6466 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6467 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6468 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6469 if (target)
6470 return target;
6471 break;
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6475 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6476 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6477 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6478 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6479 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6480 if (target)
6481 return target;
6482 break;
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6486 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6487 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6488 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6489 if (mode == VOIDmode)
6490 mode = TYPE_MODE (boolean_type_node);
6491 if (!target || !register_operand (target, mode))
6492 target = gen_reg_rtx (mode);
6494 mode = get_builtin_sync_mode
6495 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6496 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6497 if (target)
6498 return target;
6499 break;
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6503 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6504 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6505 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6506 mode = get_builtin_sync_mode
6507 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6508 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6509 if (target)
6510 return target;
6511 break;
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6515 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6516 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6517 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6518 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6519 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6520 if (target)
6521 return target;
6522 break;
6524 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6525 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6526 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6527 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6528 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6529 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6530 expand_builtin_sync_lock_release (mode, exp);
6531 return const0_rtx;
6533 case BUILT_IN_SYNC_SYNCHRONIZE:
6534 expand_builtin_sync_synchronize ();
6535 return const0_rtx;
6537 case BUILT_IN_ATOMIC_EXCHANGE_1:
6538 case BUILT_IN_ATOMIC_EXCHANGE_2:
6539 case BUILT_IN_ATOMIC_EXCHANGE_4:
6540 case BUILT_IN_ATOMIC_EXCHANGE_8:
6541 case BUILT_IN_ATOMIC_EXCHANGE_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6543 target = expand_builtin_atomic_exchange (mode, exp, target);
6544 if (target)
6545 return target;
6546 break;
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6551 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6552 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6554 unsigned int nargs, z;
6555 vec<tree, va_gc> *vec;
6557 mode =
6558 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6559 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6560 if (target)
6561 return target;
6563 /* If this is turned into an external library call, the weak parameter
6564 must be dropped to match the expected parameter list. */
6565 nargs = call_expr_nargs (exp);
6566 vec_alloc (vec, nargs - 1);
6567 for (z = 0; z < 3; z++)
6568 vec->quick_push (CALL_EXPR_ARG (exp, z));
6569 /* Skip the boolean weak parameter. */
6570 for (z = 4; z < 6; z++)
6571 vec->quick_push (CALL_EXPR_ARG (exp, z));
6572 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6573 break;
6576 case BUILT_IN_ATOMIC_LOAD_1:
6577 case BUILT_IN_ATOMIC_LOAD_2:
6578 case BUILT_IN_ATOMIC_LOAD_4:
6579 case BUILT_IN_ATOMIC_LOAD_8:
6580 case BUILT_IN_ATOMIC_LOAD_16:
6581 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6582 target = expand_builtin_atomic_load (mode, exp, target);
6583 if (target)
6584 return target;
6585 break;
6587 case BUILT_IN_ATOMIC_STORE_1:
6588 case BUILT_IN_ATOMIC_STORE_2:
6589 case BUILT_IN_ATOMIC_STORE_4:
6590 case BUILT_IN_ATOMIC_STORE_8:
6591 case BUILT_IN_ATOMIC_STORE_16:
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6593 target = expand_builtin_atomic_store (mode, exp);
6594 if (target)
6595 return const0_rtx;
6596 break;
6598 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6599 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6600 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6601 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6602 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6604 enum built_in_function lib;
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6606 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6607 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6608 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6609 ignore, lib);
6610 if (target)
6611 return target;
6612 break;
6614 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6615 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6616 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6617 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6618 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6620 enum built_in_function lib;
6621 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6622 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6623 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6624 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6625 ignore, lib);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_ATOMIC_AND_FETCH_1:
6631 case BUILT_IN_ATOMIC_AND_FETCH_2:
6632 case BUILT_IN_ATOMIC_AND_FETCH_4:
6633 case BUILT_IN_ATOMIC_AND_FETCH_8:
6634 case BUILT_IN_ATOMIC_AND_FETCH_16:
6636 enum built_in_function lib;
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6638 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6639 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6640 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6641 ignore, lib);
6642 if (target)
6643 return target;
6644 break;
6646 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6647 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6648 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6649 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6650 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6652 enum built_in_function lib;
6653 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6654 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6655 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6656 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6657 ignore, lib);
6658 if (target)
6659 return target;
6660 break;
6662 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6663 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6664 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6665 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6666 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6668 enum built_in_function lib;
6669 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6670 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6671 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6672 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6673 ignore, lib);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_ATOMIC_OR_FETCH_1:
6679 case BUILT_IN_ATOMIC_OR_FETCH_2:
6680 case BUILT_IN_ATOMIC_OR_FETCH_4:
6681 case BUILT_IN_ATOMIC_OR_FETCH_8:
6682 case BUILT_IN_ATOMIC_OR_FETCH_16:
6684 enum built_in_function lib;
6685 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6686 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6687 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6688 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6689 ignore, lib);
6690 if (target)
6691 return target;
6692 break;
6694 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6695 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6696 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6697 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6698 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6700 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6701 ignore, BUILT_IN_NONE);
6702 if (target)
6703 return target;
6704 break;
6706 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6707 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6708 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6709 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6710 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6712 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6713 ignore, BUILT_IN_NONE);
6714 if (target)
6715 return target;
6716 break;
6718 case BUILT_IN_ATOMIC_FETCH_AND_1:
6719 case BUILT_IN_ATOMIC_FETCH_AND_2:
6720 case BUILT_IN_ATOMIC_FETCH_AND_4:
6721 case BUILT_IN_ATOMIC_FETCH_AND_8:
6722 case BUILT_IN_ATOMIC_FETCH_AND_16:
6723 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6724 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6725 ignore, BUILT_IN_NONE);
6726 if (target)
6727 return target;
6728 break;
6730 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6731 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6732 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6733 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6734 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6736 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6737 ignore, BUILT_IN_NONE);
6738 if (target)
6739 return target;
6740 break;
6742 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6743 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6744 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6745 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6746 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6747 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6748 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6749 ignore, BUILT_IN_NONE);
6750 if (target)
6751 return target;
6752 break;
6754 case BUILT_IN_ATOMIC_FETCH_OR_1:
6755 case BUILT_IN_ATOMIC_FETCH_OR_2:
6756 case BUILT_IN_ATOMIC_FETCH_OR_4:
6757 case BUILT_IN_ATOMIC_FETCH_OR_8:
6758 case BUILT_IN_ATOMIC_FETCH_OR_16:
6759 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6760 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6761 ignore, BUILT_IN_NONE);
6762 if (target)
6763 return target;
6764 break;
6766 case BUILT_IN_ATOMIC_TEST_AND_SET:
6767 return expand_builtin_atomic_test_and_set (exp, target);
6769 case BUILT_IN_ATOMIC_CLEAR:
6770 return expand_builtin_atomic_clear (exp);
6772 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6773 return expand_builtin_atomic_always_lock_free (exp);
6775 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6776 target = expand_builtin_atomic_is_lock_free (exp);
6777 if (target)
6778 return target;
6779 break;
6781 case BUILT_IN_ATOMIC_THREAD_FENCE:
6782 expand_builtin_atomic_thread_fence (exp);
6783 return const0_rtx;
6785 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6786 expand_builtin_atomic_signal_fence (exp);
6787 return const0_rtx;
6789 case BUILT_IN_OBJECT_SIZE:
6790 return expand_builtin_object_size (exp);
6792 case BUILT_IN_MEMCPY_CHK:
6793 case BUILT_IN_MEMPCPY_CHK:
6794 case BUILT_IN_MEMMOVE_CHK:
6795 case BUILT_IN_MEMSET_CHK:
6796 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6797 if (target)
6798 return target;
6799 break;
6801 case BUILT_IN_STRCPY_CHK:
6802 case BUILT_IN_STPCPY_CHK:
6803 case BUILT_IN_STRNCPY_CHK:
6804 case BUILT_IN_STPNCPY_CHK:
6805 case BUILT_IN_STRCAT_CHK:
6806 case BUILT_IN_STRNCAT_CHK:
6807 case BUILT_IN_SNPRINTF_CHK:
6808 case BUILT_IN_VSNPRINTF_CHK:
6809 maybe_emit_chk_warning (exp, fcode);
6810 break;
6812 case BUILT_IN_SPRINTF_CHK:
6813 case BUILT_IN_VSPRINTF_CHK:
6814 maybe_emit_sprintf_chk_warning (exp, fcode);
6815 break;
6817 case BUILT_IN_FREE:
6818 if (warn_free_nonheap_object)
6819 maybe_emit_free_warning (exp);
6820 break;
6822 case BUILT_IN_THREAD_POINTER:
6823 return expand_builtin_thread_pointer (exp, target);
6825 case BUILT_IN_SET_THREAD_POINTER:
6826 expand_builtin_set_thread_pointer (exp);
6827 return const0_rtx;
6829 case BUILT_IN_CILK_DETACH:
6830 expand_builtin_cilk_detach (exp);
6831 return const0_rtx;
6833 case BUILT_IN_CILK_POP_FRAME:
6834 expand_builtin_cilk_pop_frame (exp);
6835 return const0_rtx;
6837 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6841 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6842 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6843 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6844 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6845 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6846 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6847 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6848 /* We allow user CHKP builtins if Pointer Bounds
6849 Checker is off. */
6850 if (!chkp_function_instrumented_p (current_function_decl))
6852 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6853 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6854 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6855 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6856 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6857 return expand_normal (CALL_EXPR_ARG (exp, 0));
6858 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6859 return expand_normal (size_zero_node);
6860 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6861 return expand_normal (size_int (-1));
6862 else
6863 return const0_rtx;
6865 /* FALLTHROUGH */
6867 case BUILT_IN_CHKP_BNDMK:
6868 case BUILT_IN_CHKP_BNDSTX:
6869 case BUILT_IN_CHKP_BNDCL:
6870 case BUILT_IN_CHKP_BNDCU:
6871 case BUILT_IN_CHKP_BNDLDX:
6872 case BUILT_IN_CHKP_BNDRET:
6873 case BUILT_IN_CHKP_INTERSECT:
6874 case BUILT_IN_CHKP_NARROW:
6875 case BUILT_IN_CHKP_EXTRACT_LOWER:
6876 case BUILT_IN_CHKP_EXTRACT_UPPER:
6877 /* Software implementation of Pointer Bounds Checker is NYI.
6878 Target support is required. */
6879 error ("Your target platform does not support -fcheck-pointer-bounds");
6880 break;
6882 case BUILT_IN_ACC_ON_DEVICE:
6883 /* Do library call, if we failed to expand the builtin when
6884 folding. */
6885 break;
6887 default: /* just do library call, if unknown builtin */
6888 break;
6891 /* The switch statement above can drop through to cause the function
6892 to be called normally. */
6893 return expand_call (exp, target, ignore);
6896 /* Similar to expand_builtin but is used for instrumented calls. */
6899 expand_builtin_with_bounds (tree exp, rtx target,
6900 rtx subtarget ATTRIBUTE_UNUSED,
6901 machine_mode mode, int ignore)
6903 tree fndecl = get_callee_fndecl (exp);
6904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6906 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6908 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6909 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6911 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6912 && fcode < END_CHKP_BUILTINS);
6914 switch (fcode)
6916 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6917 target = expand_builtin_memcpy_with_bounds (exp, target);
6918 if (target)
6919 return target;
6920 break;
6922 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6923 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6924 if (target)
6925 return target;
6926 break;
6928 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6929 target = expand_builtin_memset_with_bounds (exp, target, mode);
6930 if (target)
6931 return target;
6932 break;
6934 default:
6935 break;
6938 /* The switch statement above can drop through to cause the function
6939 to be called normally. */
6940 return expand_call (exp, target, ignore);
6943 /* Determine whether a tree node represents a call to a built-in
6944 function. If the tree T is a call to a built-in function with
6945 the right number of arguments of the appropriate types, return
6946 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6947 Otherwise the return value is END_BUILTINS. */
6949 enum built_in_function
6950 builtin_mathfn_code (const_tree t)
6952 const_tree fndecl, arg, parmlist;
6953 const_tree argtype, parmtype;
6954 const_call_expr_arg_iterator iter;
6956 if (TREE_CODE (t) != CALL_EXPR
6957 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6958 return END_BUILTINS;
6960 fndecl = get_callee_fndecl (t);
6961 if (fndecl == NULL_TREE
6962 || TREE_CODE (fndecl) != FUNCTION_DECL
6963 || ! DECL_BUILT_IN (fndecl)
6964 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6965 return END_BUILTINS;
6967 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6968 init_const_call_expr_arg_iterator (t, &iter);
6969 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6971 /* If a function doesn't take a variable number of arguments,
6972 the last element in the list will have type `void'. */
6973 parmtype = TREE_VALUE (parmlist);
6974 if (VOID_TYPE_P (parmtype))
6976 if (more_const_call_expr_args_p (&iter))
6977 return END_BUILTINS;
6978 return DECL_FUNCTION_CODE (fndecl);
6981 if (! more_const_call_expr_args_p (&iter))
6982 return END_BUILTINS;
6984 arg = next_const_call_expr_arg (&iter);
6985 argtype = TREE_TYPE (arg);
6987 if (SCALAR_FLOAT_TYPE_P (parmtype))
6989 if (! SCALAR_FLOAT_TYPE_P (argtype))
6990 return END_BUILTINS;
6992 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6994 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6995 return END_BUILTINS;
6997 else if (POINTER_TYPE_P (parmtype))
6999 if (! POINTER_TYPE_P (argtype))
7000 return END_BUILTINS;
7002 else if (INTEGRAL_TYPE_P (parmtype))
7004 if (! INTEGRAL_TYPE_P (argtype))
7005 return END_BUILTINS;
7007 else
7008 return END_BUILTINS;
7011 /* Variable-length argument list. */
7012 return DECL_FUNCTION_CODE (fndecl);
7015 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7016 evaluate to a constant. */
7018 static tree
7019 fold_builtin_constant_p (tree arg)
7021 /* We return 1 for a numeric type that's known to be a constant
7022 value at compile-time or for an aggregate type that's a
7023 literal constant. */
7024 STRIP_NOPS (arg);
7026 /* If we know this is a constant, emit the constant of one. */
7027 if (CONSTANT_CLASS_P (arg)
7028 || (TREE_CODE (arg) == CONSTRUCTOR
7029 && TREE_CONSTANT (arg)))
7030 return integer_one_node;
7031 if (TREE_CODE (arg) == ADDR_EXPR)
7033 tree op = TREE_OPERAND (arg, 0);
7034 if (TREE_CODE (op) == STRING_CST
7035 || (TREE_CODE (op) == ARRAY_REF
7036 && integer_zerop (TREE_OPERAND (op, 1))
7037 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7038 return integer_one_node;
7041 /* If this expression has side effects, show we don't know it to be a
7042 constant. Likewise if it's a pointer or aggregate type since in
7043 those case we only want literals, since those are only optimized
7044 when generating RTL, not later.
7045 And finally, if we are compiling an initializer, not code, we
7046 need to return a definite result now; there's not going to be any
7047 more optimization done. */
7048 if (TREE_SIDE_EFFECTS (arg)
7049 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7050 || POINTER_TYPE_P (TREE_TYPE (arg))
7051 || cfun == 0
7052 || folding_initializer
7053 || force_folding_builtin_constant_p)
7054 return integer_zero_node;
7056 return NULL_TREE;
7059 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7060 return it as a truthvalue. */
7062 static tree
7063 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7064 tree predictor)
7066 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7068 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7069 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7070 ret_type = TREE_TYPE (TREE_TYPE (fn));
7071 pred_type = TREE_VALUE (arg_types);
7072 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7074 pred = fold_convert_loc (loc, pred_type, pred);
7075 expected = fold_convert_loc (loc, expected_type, expected);
7076 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7077 predictor);
7079 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7080 build_int_cst (ret_type, 0));
7083 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7084 NULL_TREE if no simplification is possible. */
7086 tree
7087 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7089 tree inner, fndecl, inner_arg0;
7090 enum tree_code code;
7092 /* Distribute the expected value over short-circuiting operators.
7093 See through the cast from truthvalue_type_node to long. */
7094 inner_arg0 = arg0;
7095 while (CONVERT_EXPR_P (inner_arg0)
7096 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7097 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7098 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7100 /* If this is a builtin_expect within a builtin_expect keep the
7101 inner one. See through a comparison against a constant. It
7102 might have been added to create a thruthvalue. */
7103 inner = inner_arg0;
7105 if (COMPARISON_CLASS_P (inner)
7106 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7107 inner = TREE_OPERAND (inner, 0);
7109 if (TREE_CODE (inner) == CALL_EXPR
7110 && (fndecl = get_callee_fndecl (inner))
7111 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7112 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7113 return arg0;
7115 inner = inner_arg0;
7116 code = TREE_CODE (inner);
7117 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7119 tree op0 = TREE_OPERAND (inner, 0);
7120 tree op1 = TREE_OPERAND (inner, 1);
7122 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7123 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7124 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7126 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7129 /* If the argument isn't invariant then there's nothing else we can do. */
7130 if (!TREE_CONSTANT (inner_arg0))
7131 return NULL_TREE;
7133 /* If we expect that a comparison against the argument will fold to
7134 a constant return the constant. In practice, this means a true
7135 constant or the address of a non-weak symbol. */
7136 inner = inner_arg0;
7137 STRIP_NOPS (inner);
7138 if (TREE_CODE (inner) == ADDR_EXPR)
7142 inner = TREE_OPERAND (inner, 0);
7144 while (TREE_CODE (inner) == COMPONENT_REF
7145 || TREE_CODE (inner) == ARRAY_REF);
7146 if ((TREE_CODE (inner) == VAR_DECL
7147 || TREE_CODE (inner) == FUNCTION_DECL)
7148 && DECL_WEAK (inner))
7149 return NULL_TREE;
7152 /* Otherwise, ARG0 already has the proper type for the return value. */
7153 return arg0;
7156 /* Fold a call to __builtin_classify_type with argument ARG. */
7158 static tree
7159 fold_builtin_classify_type (tree arg)
7161 if (arg == 0)
7162 return build_int_cst (integer_type_node, no_type_class);
7164 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7167 /* Fold a call to __builtin_strlen with argument ARG. */
7169 static tree
7170 fold_builtin_strlen (location_t loc, tree type, tree arg)
7172 if (!validate_arg (arg, POINTER_TYPE))
7173 return NULL_TREE;
7174 else
7176 tree len = c_strlen (arg, 0);
7178 if (len)
7179 return fold_convert_loc (loc, type, len);
7181 return NULL_TREE;
7185 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7187 static tree
7188 fold_builtin_inf (location_t loc, tree type, int warn)
7190 REAL_VALUE_TYPE real;
7192 /* __builtin_inff is intended to be usable to define INFINITY on all
7193 targets. If an infinity is not available, INFINITY expands "to a
7194 positive constant of type float that overflows at translation
7195 time", footnote "In this case, using INFINITY will violate the
7196 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7197 Thus we pedwarn to ensure this constraint violation is
7198 diagnosed. */
7199 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7200 pedwarn (loc, 0, "target format does not support infinity");
7202 real_inf (&real);
7203 return build_real (type, real);
7206 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7207 NULL_TREE if no simplification can be made. */
7209 static tree
7210 fold_builtin_sincos (location_t loc,
7211 tree arg0, tree arg1, tree arg2)
7213 tree type;
7214 tree fndecl, call = NULL_TREE;
7216 if (!validate_arg (arg0, REAL_TYPE)
7217 || !validate_arg (arg1, POINTER_TYPE)
7218 || !validate_arg (arg2, POINTER_TYPE))
7219 return NULL_TREE;
7221 type = TREE_TYPE (arg0);
7223 /* Calculate the result when the argument is a constant. */
7224 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7225 if (fn == END_BUILTINS)
7226 return NULL_TREE;
7228 /* Canonicalize sincos to cexpi. */
7229 if (TREE_CODE (arg0) == REAL_CST)
7231 tree complex_type = build_complex_type (type);
7232 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7234 if (!call)
7236 if (!targetm.libc_has_function (function_c99_math_complex)
7237 || !builtin_decl_implicit_p (fn))
7238 return NULL_TREE;
7239 fndecl = builtin_decl_explicit (fn);
7240 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7241 call = builtin_save_expr (call);
7244 return build2 (COMPOUND_EXPR, void_type_node,
7245 build2 (MODIFY_EXPR, void_type_node,
7246 build_fold_indirect_ref_loc (loc, arg1),
7247 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7248 build2 (MODIFY_EXPR, void_type_node,
7249 build_fold_indirect_ref_loc (loc, arg2),
7250 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7253 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7254 arguments to the call, and TYPE is its return type.
7255 Return NULL_TREE if no simplification can be made. */
7257 static tree
7258 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7260 if (!validate_arg (arg1, POINTER_TYPE)
7261 || !validate_arg (arg2, INTEGER_TYPE)
7262 || !validate_arg (len, INTEGER_TYPE))
7263 return NULL_TREE;
7264 else
7266 const char *p1;
7268 if (TREE_CODE (arg2) != INTEGER_CST
7269 || !tree_fits_uhwi_p (len))
7270 return NULL_TREE;
7272 p1 = c_getstr (arg1);
7273 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7275 char c;
7276 const char *r;
7277 tree tem;
7279 if (target_char_cast (arg2, &c))
7280 return NULL_TREE;
7282 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7284 if (r == NULL)
7285 return build_int_cst (TREE_TYPE (arg1), 0);
7287 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7288 return fold_convert_loc (loc, type, tem);
7290 return NULL_TREE;
7294 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7295 Return NULL_TREE if no simplification can be made. */
7297 static tree
7298 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7300 if (!validate_arg (arg1, POINTER_TYPE)
7301 || !validate_arg (arg2, POINTER_TYPE)
7302 || !validate_arg (len, INTEGER_TYPE))
7303 return NULL_TREE;
7305 /* If the LEN parameter is zero, return zero. */
7306 if (integer_zerop (len))
7307 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7308 arg1, arg2);
7310 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7311 if (operand_equal_p (arg1, arg2, 0))
7312 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7314 /* If len parameter is one, return an expression corresponding to
7315 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7316 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7318 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7319 tree cst_uchar_ptr_node
7320 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7322 tree ind1
7323 = fold_convert_loc (loc, integer_type_node,
7324 build1 (INDIRECT_REF, cst_uchar_node,
7325 fold_convert_loc (loc,
7326 cst_uchar_ptr_node,
7327 arg1)));
7328 tree ind2
7329 = fold_convert_loc (loc, integer_type_node,
7330 build1 (INDIRECT_REF, cst_uchar_node,
7331 fold_convert_loc (loc,
7332 cst_uchar_ptr_node,
7333 arg2)));
7334 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7337 return NULL_TREE;
7340 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7341 Return NULL_TREE if no simplification can be made. */
7343 static tree
7344 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7346 if (!validate_arg (arg1, POINTER_TYPE)
7347 || !validate_arg (arg2, POINTER_TYPE))
7348 return NULL_TREE;
7350 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7351 if (operand_equal_p (arg1, arg2, 0))
7352 return integer_zero_node;
7354 /* If the second arg is "", return *(const unsigned char*)arg1. */
7355 const char *p2 = c_getstr (arg2);
7356 if (p2 && *p2 == '\0')
7358 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7359 tree cst_uchar_ptr_node
7360 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7362 return fold_convert_loc (loc, integer_type_node,
7363 build1 (INDIRECT_REF, cst_uchar_node,
7364 fold_convert_loc (loc,
7365 cst_uchar_ptr_node,
7366 arg1)));
7369 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7370 const char *p1 = c_getstr (arg1);
7371 if (p1 && *p1 == '\0')
7373 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7374 tree cst_uchar_ptr_node
7375 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7377 tree temp
7378 = fold_convert_loc (loc, integer_type_node,
7379 build1 (INDIRECT_REF, cst_uchar_node,
7380 fold_convert_loc (loc,
7381 cst_uchar_ptr_node,
7382 arg2)));
7383 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7386 return NULL_TREE;
7389 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7390 Return NULL_TREE if no simplification can be made. */
7392 static tree
7393 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7395 if (!validate_arg (arg1, POINTER_TYPE)
7396 || !validate_arg (arg2, POINTER_TYPE)
7397 || !validate_arg (len, INTEGER_TYPE))
7398 return NULL_TREE;
7400 /* If the LEN parameter is zero, return zero. */
7401 if (integer_zerop (len))
7402 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7403 arg1, arg2);
7405 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7406 if (operand_equal_p (arg1, arg2, 0))
7407 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7409 /* If the second arg is "", and the length is greater than zero,
7410 return *(const unsigned char*)arg1. */
7411 const char *p2 = c_getstr (arg2);
7412 if (p2 && *p2 == '\0'
7413 && TREE_CODE (len) == INTEGER_CST
7414 && tree_int_cst_sgn (len) == 1)
7416 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7417 tree cst_uchar_ptr_node
7418 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7420 return fold_convert_loc (loc, integer_type_node,
7421 build1 (INDIRECT_REF, cst_uchar_node,
7422 fold_convert_loc (loc,
7423 cst_uchar_ptr_node,
7424 arg1)));
7427 /* If the first arg is "", and the length is greater than zero,
7428 return -*(const unsigned char*)arg2. */
7429 const char *p1 = c_getstr (arg1);
7430 if (p1 && *p1 == '\0'
7431 && TREE_CODE (len) == INTEGER_CST
7432 && tree_int_cst_sgn (len) == 1)
7434 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7435 tree cst_uchar_ptr_node
7436 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7438 tree temp = fold_convert_loc (loc, integer_type_node,
7439 build1 (INDIRECT_REF, cst_uchar_node,
7440 fold_convert_loc (loc,
7441 cst_uchar_ptr_node,
7442 arg2)));
7443 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7446 /* If len parameter is one, return an expression corresponding to
7447 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7448 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7450 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7451 tree cst_uchar_ptr_node
7452 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7454 tree ind1 = fold_convert_loc (loc, integer_type_node,
7455 build1 (INDIRECT_REF, cst_uchar_node,
7456 fold_convert_loc (loc,
7457 cst_uchar_ptr_node,
7458 arg1)));
7459 tree ind2 = fold_convert_loc (loc, integer_type_node,
7460 build1 (INDIRECT_REF, cst_uchar_node,
7461 fold_convert_loc (loc,
7462 cst_uchar_ptr_node,
7463 arg2)));
7464 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7467 return NULL_TREE;
7470 /* Fold a call to builtin isascii with argument ARG. */
7472 static tree
7473 fold_builtin_isascii (location_t loc, tree arg)
7475 if (!validate_arg (arg, INTEGER_TYPE))
7476 return NULL_TREE;
7477 else
7479 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7480 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7481 build_int_cst (integer_type_node,
7482 ~ (unsigned HOST_WIDE_INT) 0x7f));
7483 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7484 arg, integer_zero_node);
7488 /* Fold a call to builtin toascii with argument ARG. */
7490 static tree
7491 fold_builtin_toascii (location_t loc, tree arg)
7493 if (!validate_arg (arg, INTEGER_TYPE))
7494 return NULL_TREE;
7496 /* Transform toascii(c) -> (c & 0x7f). */
7497 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7498 build_int_cst (integer_type_node, 0x7f));
7501 /* Fold a call to builtin isdigit with argument ARG. */
7503 static tree
7504 fold_builtin_isdigit (location_t loc, tree arg)
7506 if (!validate_arg (arg, INTEGER_TYPE))
7507 return NULL_TREE;
7508 else
7510 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7511 /* According to the C standard, isdigit is unaffected by locale.
7512 However, it definitely is affected by the target character set. */
7513 unsigned HOST_WIDE_INT target_digit0
7514 = lang_hooks.to_target_charset ('0');
7516 if (target_digit0 == 0)
7517 return NULL_TREE;
7519 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7520 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7521 build_int_cst (unsigned_type_node, target_digit0));
7522 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7523 build_int_cst (unsigned_type_node, 9));
7527 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7529 static tree
7530 fold_builtin_fabs (location_t loc, tree arg, tree type)
7532 if (!validate_arg (arg, REAL_TYPE))
7533 return NULL_TREE;
7535 arg = fold_convert_loc (loc, type, arg);
7536 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7539 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7541 static tree
7542 fold_builtin_abs (location_t loc, tree arg, tree type)
7544 if (!validate_arg (arg, INTEGER_TYPE))
7545 return NULL_TREE;
7547 arg = fold_convert_loc (loc, type, arg);
7548 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7551 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7553 static tree
7554 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7556 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7557 if (validate_arg (arg0, REAL_TYPE)
7558 && validate_arg (arg1, REAL_TYPE)
7559 && validate_arg (arg2, REAL_TYPE)
7560 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7561 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7563 return NULL_TREE;
7566 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7568 static tree
7569 fold_builtin_carg (location_t loc, tree arg, tree type)
7571 if (validate_arg (arg, COMPLEX_TYPE)
7572 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7574 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7576 if (atan2_fn)
7578 tree new_arg = builtin_save_expr (arg);
7579 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7580 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7581 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7585 return NULL_TREE;
7588 /* Fold a call to builtin frexp, we can assume the base is 2. */
7590 static tree
7591 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7593 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7594 return NULL_TREE;
7596 STRIP_NOPS (arg0);
7598 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7599 return NULL_TREE;
7601 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7603 /* Proceed if a valid pointer type was passed in. */
7604 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7606 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7607 tree frac, exp;
7609 switch (value->cl)
7611 case rvc_zero:
7612 /* For +-0, return (*exp = 0, +-0). */
7613 exp = integer_zero_node;
7614 frac = arg0;
7615 break;
7616 case rvc_nan:
7617 case rvc_inf:
7618 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7619 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7620 case rvc_normal:
7622 /* Since the frexp function always expects base 2, and in
7623 GCC normalized significands are already in the range
7624 [0.5, 1.0), we have exactly what frexp wants. */
7625 REAL_VALUE_TYPE frac_rvt = *value;
7626 SET_REAL_EXP (&frac_rvt, 0);
7627 frac = build_real (rettype, frac_rvt);
7628 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7630 break;
7631 default:
7632 gcc_unreachable ();
7635 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7636 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7637 TREE_SIDE_EFFECTS (arg1) = 1;
7638 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7641 return NULL_TREE;
7644 /* Fold a call to builtin modf. */
7646 static tree
7647 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7649 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7650 return NULL_TREE;
7652 STRIP_NOPS (arg0);
7654 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7655 return NULL_TREE;
7657 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7659 /* Proceed if a valid pointer type was passed in. */
7660 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7662 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7663 REAL_VALUE_TYPE trunc, frac;
7665 switch (value->cl)
7667 case rvc_nan:
7668 case rvc_zero:
7669 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7670 trunc = frac = *value;
7671 break;
7672 case rvc_inf:
7673 /* For +-Inf, return (*arg1 = arg0, +-0). */
7674 frac = dconst0;
7675 frac.sign = value->sign;
7676 trunc = *value;
7677 break;
7678 case rvc_normal:
7679 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7680 real_trunc (&trunc, VOIDmode, value);
7681 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7682 /* If the original number was negative and already
7683 integral, then the fractional part is -0.0. */
7684 if (value->sign && frac.cl == rvc_zero)
7685 frac.sign = value->sign;
7686 break;
7689 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7690 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7691 build_real (rettype, trunc));
7692 TREE_SIDE_EFFECTS (arg1) = 1;
7693 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7694 build_real (rettype, frac));
7697 return NULL_TREE;
7700 /* Given a location LOC, an interclass builtin function decl FNDECL
7701 and its single argument ARG, return an folded expression computing
7702 the same, or NULL_TREE if we either couldn't or didn't want to fold
7703 (the latter happen if there's an RTL instruction available). */
7705 static tree
7706 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7708 machine_mode mode;
7710 if (!validate_arg (arg, REAL_TYPE))
7711 return NULL_TREE;
7713 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7714 return NULL_TREE;
7716 mode = TYPE_MODE (TREE_TYPE (arg));
7718 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7720 /* If there is no optab, try generic code. */
7721 switch (DECL_FUNCTION_CODE (fndecl))
7723 tree result;
7725 CASE_FLT_FN (BUILT_IN_ISINF):
7727 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7728 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7729 tree type = TREE_TYPE (arg);
7730 REAL_VALUE_TYPE r;
7731 char buf[128];
7733 if (is_ibm_extended)
7735 /* NaN and Inf are encoded in the high-order double value
7736 only. The low-order value is not significant. */
7737 type = double_type_node;
7738 mode = DFmode;
7739 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7741 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7742 real_from_string (&r, buf);
7743 result = build_call_expr (isgr_fn, 2,
7744 fold_build1_loc (loc, ABS_EXPR, type, arg),
7745 build_real (type, r));
7746 return result;
7748 CASE_FLT_FN (BUILT_IN_FINITE):
7749 case BUILT_IN_ISFINITE:
7751 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7752 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7753 tree type = TREE_TYPE (arg);
7754 REAL_VALUE_TYPE r;
7755 char buf[128];
7757 if (is_ibm_extended)
7759 /* NaN and Inf are encoded in the high-order double value
7760 only. The low-order value is not significant. */
7761 type = double_type_node;
7762 mode = DFmode;
7763 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7765 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7766 real_from_string (&r, buf);
7767 result = build_call_expr (isle_fn, 2,
7768 fold_build1_loc (loc, ABS_EXPR, type, arg),
7769 build_real (type, r));
7770 /*result = fold_build2_loc (loc, UNGT_EXPR,
7771 TREE_TYPE (TREE_TYPE (fndecl)),
7772 fold_build1_loc (loc, ABS_EXPR, type, arg),
7773 build_real (type, r));
7774 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7775 TREE_TYPE (TREE_TYPE (fndecl)),
7776 result);*/
7777 return result;
7779 case BUILT_IN_ISNORMAL:
7781 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7782 islessequal(fabs(x),DBL_MAX). */
7783 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7784 tree type = TREE_TYPE (arg);
7785 tree orig_arg, max_exp, min_exp;
7786 machine_mode orig_mode = mode;
7787 REAL_VALUE_TYPE rmax, rmin;
7788 char buf[128];
7790 orig_arg = arg = builtin_save_expr (arg);
7791 if (is_ibm_extended)
7793 /* Use double to test the normal range of IBM extended
7794 precision. Emin for IBM extended precision is
7795 different to emin for IEEE double, being 53 higher
7796 since the low double exponent is at least 53 lower
7797 than the high double exponent. */
7798 type = double_type_node;
7799 mode = DFmode;
7800 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7802 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7804 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7805 real_from_string (&rmax, buf);
7806 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7807 real_from_string (&rmin, buf);
7808 max_exp = build_real (type, rmax);
7809 min_exp = build_real (type, rmin);
7811 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7812 if (is_ibm_extended)
7814 /* Testing the high end of the range is done just using
7815 the high double, using the same test as isfinite().
7816 For the subnormal end of the range we first test the
7817 high double, then if its magnitude is equal to the
7818 limit of 0x1p-969, we test whether the low double is
7819 non-zero and opposite sign to the high double. */
7820 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7821 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7822 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7823 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7824 arg, min_exp);
7825 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7826 complex_double_type_node, orig_arg);
7827 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7828 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7829 tree zero = build_real (type, dconst0);
7830 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7831 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7832 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7833 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7834 fold_build3 (COND_EXPR,
7835 integer_type_node,
7836 hilt, logt, lolt));
7837 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7838 eq_min, ok_lo);
7839 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7840 gt_min, eq_min);
7842 else
7844 tree const isge_fn
7845 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7846 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7848 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7849 max_exp, min_exp);
7850 return result;
7852 default:
7853 break;
7856 return NULL_TREE;
7859 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7860 ARG is the argument for the call. */
7862 static tree
7863 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7865 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7867 if (!validate_arg (arg, REAL_TYPE))
7868 return NULL_TREE;
7870 switch (builtin_index)
7872 case BUILT_IN_ISINF:
7873 if (!HONOR_INFINITIES (arg))
7874 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7876 return NULL_TREE;
7878 case BUILT_IN_ISINF_SIGN:
7880 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7881 /* In a boolean context, GCC will fold the inner COND_EXPR to
7882 1. So e.g. "if (isinf_sign(x))" would be folded to just
7883 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7884 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7885 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7886 tree tmp = NULL_TREE;
7888 arg = builtin_save_expr (arg);
7890 if (signbit_fn && isinf_fn)
7892 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7893 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7895 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7896 signbit_call, integer_zero_node);
7897 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7898 isinf_call, integer_zero_node);
7900 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7901 integer_minus_one_node, integer_one_node);
7902 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7903 isinf_call, tmp,
7904 integer_zero_node);
7907 return tmp;
7910 case BUILT_IN_ISFINITE:
7911 if (!HONOR_NANS (arg)
7912 && !HONOR_INFINITIES (arg))
7913 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7915 return NULL_TREE;
7917 case BUILT_IN_ISNAN:
7918 if (!HONOR_NANS (arg))
7919 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7922 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7923 if (is_ibm_extended)
7925 /* NaN and Inf are encoded in the high-order double value
7926 only. The low-order value is not significant. */
7927 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7930 arg = builtin_save_expr (arg);
7931 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7933 default:
7934 gcc_unreachable ();
7938 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7939 This builtin will generate code to return the appropriate floating
7940 point classification depending on the value of the floating point
7941 number passed in. The possible return values must be supplied as
7942 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7943 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
7944 one floating point argument which is "type generic". */
7946 static tree
7947 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7949 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7950 arg, type, res, tmp;
7951 machine_mode mode;
7952 REAL_VALUE_TYPE r;
7953 char buf[128];
7955 /* Verify the required arguments in the original call. */
7956 if (nargs != 6
7957 || !validate_arg (args[0], INTEGER_TYPE)
7958 || !validate_arg (args[1], INTEGER_TYPE)
7959 || !validate_arg (args[2], INTEGER_TYPE)
7960 || !validate_arg (args[3], INTEGER_TYPE)
7961 || !validate_arg (args[4], INTEGER_TYPE)
7962 || !validate_arg (args[5], REAL_TYPE))
7963 return NULL_TREE;
7965 fp_nan = args[0];
7966 fp_infinite = args[1];
7967 fp_normal = args[2];
7968 fp_subnormal = args[3];
7969 fp_zero = args[4];
7970 arg = args[5];
7971 type = TREE_TYPE (arg);
7972 mode = TYPE_MODE (type);
7973 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7975 /* fpclassify(x) ->
7976 isnan(x) ? FP_NAN :
7977 (fabs(x) == Inf ? FP_INFINITE :
7978 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7979 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7981 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7982 build_real (type, dconst0));
7983 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7984 tmp, fp_zero, fp_subnormal);
7986 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7987 real_from_string (&r, buf);
7988 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7989 arg, build_real (type, r));
7990 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7992 if (HONOR_INFINITIES (mode))
7994 real_inf (&r);
7995 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7996 build_real (type, r));
7997 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7998 fp_infinite, res);
8001 if (HONOR_NANS (mode))
8003 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8004 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8007 return res;
8010 /* Fold a call to an unordered comparison function such as
8011 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8012 being called and ARG0 and ARG1 are the arguments for the call.
8013 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8014 the opposite of the desired result. UNORDERED_CODE is used
8015 for modes that can hold NaNs and ORDERED_CODE is used for
8016 the rest. */
8018 static tree
8019 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8020 enum tree_code unordered_code,
8021 enum tree_code ordered_code)
8023 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8024 enum tree_code code;
8025 tree type0, type1;
8026 enum tree_code code0, code1;
8027 tree cmp_type = NULL_TREE;
8029 type0 = TREE_TYPE (arg0);
8030 type1 = TREE_TYPE (arg1);
8032 code0 = TREE_CODE (type0);
8033 code1 = TREE_CODE (type1);
8035 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8036 /* Choose the wider of two real types. */
8037 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8038 ? type0 : type1;
8039 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8040 cmp_type = type0;
8041 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8042 cmp_type = type1;
8044 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8045 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8047 if (unordered_code == UNORDERED_EXPR)
8049 if (!HONOR_NANS (arg0))
8050 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8051 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8054 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8055 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8056 fold_build2_loc (loc, code, type, arg0, arg1));
8059 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8060 arithmetics if it can never overflow, or into internal functions that
8061 return both result of arithmetics and overflowed boolean flag in
8062 a complex integer result, or some other check for overflow.
8063 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8064 checking part of that. */
8066 static tree
8067 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8068 tree arg0, tree arg1, tree arg2)
8070 enum internal_fn ifn = IFN_LAST;
8071 /* The code of the expression corresponding to the type-generic
8072 built-in, or ERROR_MARK for the type-specific ones. */
8073 enum tree_code opcode = ERROR_MARK;
8074 bool ovf_only = false;
8076 switch (fcode)
8078 case BUILT_IN_ADD_OVERFLOW_P:
8079 ovf_only = true;
8080 /* FALLTHRU */
8081 case BUILT_IN_ADD_OVERFLOW:
8082 opcode = PLUS_EXPR;
8083 /* FALLTHRU */
8084 case BUILT_IN_SADD_OVERFLOW:
8085 case BUILT_IN_SADDL_OVERFLOW:
8086 case BUILT_IN_SADDLL_OVERFLOW:
8087 case BUILT_IN_UADD_OVERFLOW:
8088 case BUILT_IN_UADDL_OVERFLOW:
8089 case BUILT_IN_UADDLL_OVERFLOW:
8090 ifn = IFN_ADD_OVERFLOW;
8091 break;
8092 case BUILT_IN_SUB_OVERFLOW_P:
8093 ovf_only = true;
8094 /* FALLTHRU */
8095 case BUILT_IN_SUB_OVERFLOW:
8096 opcode = MINUS_EXPR;
8097 /* FALLTHRU */
8098 case BUILT_IN_SSUB_OVERFLOW:
8099 case BUILT_IN_SSUBL_OVERFLOW:
8100 case BUILT_IN_SSUBLL_OVERFLOW:
8101 case BUILT_IN_USUB_OVERFLOW:
8102 case BUILT_IN_USUBL_OVERFLOW:
8103 case BUILT_IN_USUBLL_OVERFLOW:
8104 ifn = IFN_SUB_OVERFLOW;
8105 break;
8106 case BUILT_IN_MUL_OVERFLOW_P:
8107 ovf_only = true;
8108 /* FALLTHRU */
8109 case BUILT_IN_MUL_OVERFLOW:
8110 opcode = MULT_EXPR;
8111 /* FALLTHRU */
8112 case BUILT_IN_SMUL_OVERFLOW:
8113 case BUILT_IN_SMULL_OVERFLOW:
8114 case BUILT_IN_SMULLL_OVERFLOW:
8115 case BUILT_IN_UMUL_OVERFLOW:
8116 case BUILT_IN_UMULL_OVERFLOW:
8117 case BUILT_IN_UMULLL_OVERFLOW:
8118 ifn = IFN_MUL_OVERFLOW;
8119 break;
8120 default:
8121 gcc_unreachable ();
8124 /* For the "generic" overloads, the first two arguments can have different
8125 types and the last argument determines the target type to use to check
8126 for overflow. The arguments of the other overloads all have the same
8127 type. */
8128 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8130 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8131 arguments are constant, attempt to fold the built-in call into a constant
8132 expression indicating whether or not it detected an overflow. */
8133 if (ovf_only
8134 && TREE_CODE (arg0) == INTEGER_CST
8135 && TREE_CODE (arg1) == INTEGER_CST)
8136 /* Perform the computation in the target type and check for overflow. */
8137 return omit_one_operand_loc (loc, boolean_type_node,
8138 arith_overflowed_p (opcode, type, arg0, arg1)
8139 ? boolean_true_node : boolean_false_node,
8140 arg2);
8142 tree ctype = build_complex_type (type);
8143 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8144 2, arg0, arg1);
8145 tree tgt = save_expr (call);
8146 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8147 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8148 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8150 if (ovf_only)
8151 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8153 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8154 tree store
8155 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8156 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8159 /* Fold a call to __builtin_FILE to a constant string. */
8161 static inline tree
8162 fold_builtin_FILE (location_t loc)
8164 if (const char *fname = LOCATION_FILE (loc))
8165 return build_string_literal (strlen (fname) + 1, fname);
8167 return build_string_literal (1, "");
8170 /* Fold a call to __builtin_FUNCTION to a constant string. */
8172 static inline tree
8173 fold_builtin_FUNCTION ()
8175 if (current_function_decl)
8177 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8178 return build_string_literal (strlen (name) + 1, name);
8181 return build_string_literal (1, "");
8184 /* Fold a call to __builtin_LINE to an integer constant. */
8186 static inline tree
8187 fold_builtin_LINE (location_t loc, tree type)
8189 return build_int_cst (type, LOCATION_LINE (loc));
8192 /* Fold a call to built-in function FNDECL with 0 arguments.
8193 This function returns NULL_TREE if no simplification was possible. */
8195 static tree
8196 fold_builtin_0 (location_t loc, tree fndecl)
8198 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8199 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8200 switch (fcode)
8202 case BUILT_IN_FILE:
8203 return fold_builtin_FILE (loc);
8205 case BUILT_IN_FUNCTION:
8206 return fold_builtin_FUNCTION ();
8208 case BUILT_IN_LINE:
8209 return fold_builtin_LINE (loc, type);
8211 CASE_FLT_FN (BUILT_IN_INF):
8212 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8213 case BUILT_IN_INFD32:
8214 case BUILT_IN_INFD64:
8215 case BUILT_IN_INFD128:
8216 return fold_builtin_inf (loc, type, true);
8218 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8219 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8220 return fold_builtin_inf (loc, type, false);
8222 case BUILT_IN_CLASSIFY_TYPE:
8223 return fold_builtin_classify_type (NULL_TREE);
8225 default:
8226 break;
8228 return NULL_TREE;
8231 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8232 This function returns NULL_TREE if no simplification was possible. */
8234 static tree
8235 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8240 if (TREE_CODE (arg0) == ERROR_MARK)
8241 return NULL_TREE;
8243 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8244 return ret;
8246 switch (fcode)
8248 case BUILT_IN_CONSTANT_P:
8250 tree val = fold_builtin_constant_p (arg0);
8252 /* Gimplification will pull the CALL_EXPR for the builtin out of
8253 an if condition. When not optimizing, we'll not CSE it back.
8254 To avoid link error types of regressions, return false now. */
8255 if (!val && !optimize)
8256 val = integer_zero_node;
8258 return val;
8261 case BUILT_IN_CLASSIFY_TYPE:
8262 return fold_builtin_classify_type (arg0);
8264 case BUILT_IN_STRLEN:
8265 return fold_builtin_strlen (loc, type, arg0);
8267 CASE_FLT_FN (BUILT_IN_FABS):
8268 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8269 case BUILT_IN_FABSD32:
8270 case BUILT_IN_FABSD64:
8271 case BUILT_IN_FABSD128:
8272 return fold_builtin_fabs (loc, arg0, type);
8274 case BUILT_IN_ABS:
8275 case BUILT_IN_LABS:
8276 case BUILT_IN_LLABS:
8277 case BUILT_IN_IMAXABS:
8278 return fold_builtin_abs (loc, arg0, type);
8280 CASE_FLT_FN (BUILT_IN_CONJ):
8281 if (validate_arg (arg0, COMPLEX_TYPE)
8282 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8283 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8284 break;
8286 CASE_FLT_FN (BUILT_IN_CREAL):
8287 if (validate_arg (arg0, COMPLEX_TYPE)
8288 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8289 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8290 break;
8292 CASE_FLT_FN (BUILT_IN_CIMAG):
8293 if (validate_arg (arg0, COMPLEX_TYPE)
8294 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8295 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8296 break;
8298 CASE_FLT_FN (BUILT_IN_CARG):
8299 return fold_builtin_carg (loc, arg0, type);
8301 case BUILT_IN_ISASCII:
8302 return fold_builtin_isascii (loc, arg0);
8304 case BUILT_IN_TOASCII:
8305 return fold_builtin_toascii (loc, arg0);
8307 case BUILT_IN_ISDIGIT:
8308 return fold_builtin_isdigit (loc, arg0);
8310 CASE_FLT_FN (BUILT_IN_FINITE):
8311 case BUILT_IN_FINITED32:
8312 case BUILT_IN_FINITED64:
8313 case BUILT_IN_FINITED128:
8314 case BUILT_IN_ISFINITE:
8316 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8317 if (ret)
8318 return ret;
8319 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8322 CASE_FLT_FN (BUILT_IN_ISINF):
8323 case BUILT_IN_ISINFD32:
8324 case BUILT_IN_ISINFD64:
8325 case BUILT_IN_ISINFD128:
8327 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8328 if (ret)
8329 return ret;
8330 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8333 case BUILT_IN_ISNORMAL:
8334 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8336 case BUILT_IN_ISINF_SIGN:
8337 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8339 CASE_FLT_FN (BUILT_IN_ISNAN):
8340 case BUILT_IN_ISNAND32:
8341 case BUILT_IN_ISNAND64:
8342 case BUILT_IN_ISNAND128:
8343 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8345 case BUILT_IN_FREE:
8346 if (integer_zerop (arg0))
8347 return build_empty_stmt (loc);
8348 break;
8350 default:
8351 break;
8354 return NULL_TREE;
8358 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8359 This function returns NULL_TREE if no simplification was possible. */
8361 static tree
8362 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8364 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8365 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8367 if (TREE_CODE (arg0) == ERROR_MARK
8368 || TREE_CODE (arg1) == ERROR_MARK)
8369 return NULL_TREE;
8371 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8372 return ret;
8374 switch (fcode)
8376 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8377 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8378 if (validate_arg (arg0, REAL_TYPE)
8379 && validate_arg (arg1, POINTER_TYPE))
8380 return do_mpfr_lgamma_r (arg0, arg1, type);
8381 break;
8383 CASE_FLT_FN (BUILT_IN_FREXP):
8384 return fold_builtin_frexp (loc, arg0, arg1, type);
8386 CASE_FLT_FN (BUILT_IN_MODF):
8387 return fold_builtin_modf (loc, arg0, arg1, type);
8389 case BUILT_IN_STRSTR:
8390 return fold_builtin_strstr (loc, arg0, arg1, type);
8392 case BUILT_IN_STRSPN:
8393 return fold_builtin_strspn (loc, arg0, arg1);
8395 case BUILT_IN_STRCSPN:
8396 return fold_builtin_strcspn (loc, arg0, arg1);
8398 case BUILT_IN_STRCHR:
8399 case BUILT_IN_INDEX:
8400 return fold_builtin_strchr (loc, arg0, arg1, type);
8402 case BUILT_IN_STRRCHR:
8403 case BUILT_IN_RINDEX:
8404 return fold_builtin_strrchr (loc, arg0, arg1, type);
8406 case BUILT_IN_STRCMP:
8407 return fold_builtin_strcmp (loc, arg0, arg1);
8409 case BUILT_IN_STRPBRK:
8410 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8412 case BUILT_IN_EXPECT:
8413 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8415 case BUILT_IN_ISGREATER:
8416 return fold_builtin_unordered_cmp (loc, fndecl,
8417 arg0, arg1, UNLE_EXPR, LE_EXPR);
8418 case BUILT_IN_ISGREATEREQUAL:
8419 return fold_builtin_unordered_cmp (loc, fndecl,
8420 arg0, arg1, UNLT_EXPR, LT_EXPR);
8421 case BUILT_IN_ISLESS:
8422 return fold_builtin_unordered_cmp (loc, fndecl,
8423 arg0, arg1, UNGE_EXPR, GE_EXPR);
8424 case BUILT_IN_ISLESSEQUAL:
8425 return fold_builtin_unordered_cmp (loc, fndecl,
8426 arg0, arg1, UNGT_EXPR, GT_EXPR);
8427 case BUILT_IN_ISLESSGREATER:
8428 return fold_builtin_unordered_cmp (loc, fndecl,
8429 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8430 case BUILT_IN_ISUNORDERED:
8431 return fold_builtin_unordered_cmp (loc, fndecl,
8432 arg0, arg1, UNORDERED_EXPR,
8433 NOP_EXPR);
8435 /* We do the folding for va_start in the expander. */
8436 case BUILT_IN_VA_START:
8437 break;
8439 case BUILT_IN_OBJECT_SIZE:
8440 return fold_builtin_object_size (arg0, arg1);
8442 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8443 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8445 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8446 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8448 default:
8449 break;
8451 return NULL_TREE;
8454 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8455 and ARG2.
8456 This function returns NULL_TREE if no simplification was possible. */
8458 static tree
8459 fold_builtin_3 (location_t loc, tree fndecl,
8460 tree arg0, tree arg1, tree arg2)
8462 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8463 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8465 if (TREE_CODE (arg0) == ERROR_MARK
8466 || TREE_CODE (arg1) == ERROR_MARK
8467 || TREE_CODE (arg2) == ERROR_MARK)
8468 return NULL_TREE;
8470 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8471 arg0, arg1, arg2))
8472 return ret;
8474 switch (fcode)
8477 CASE_FLT_FN (BUILT_IN_SINCOS):
8478 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8480 CASE_FLT_FN (BUILT_IN_FMA):
8481 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8483 CASE_FLT_FN (BUILT_IN_REMQUO):
8484 if (validate_arg (arg0, REAL_TYPE)
8485 && validate_arg (arg1, REAL_TYPE)
8486 && validate_arg (arg2, POINTER_TYPE))
8487 return do_mpfr_remquo (arg0, arg1, arg2);
8488 break;
8490 case BUILT_IN_STRNCMP:
8491 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8493 case BUILT_IN_MEMCHR:
8494 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8496 case BUILT_IN_BCMP:
8497 case BUILT_IN_MEMCMP:
8498 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
8500 case BUILT_IN_EXPECT:
8501 return fold_builtin_expect (loc, arg0, arg1, arg2);
8503 case BUILT_IN_ADD_OVERFLOW:
8504 case BUILT_IN_SUB_OVERFLOW:
8505 case BUILT_IN_MUL_OVERFLOW:
8506 case BUILT_IN_ADD_OVERFLOW_P:
8507 case BUILT_IN_SUB_OVERFLOW_P:
8508 case BUILT_IN_MUL_OVERFLOW_P:
8509 case BUILT_IN_SADD_OVERFLOW:
8510 case BUILT_IN_SADDL_OVERFLOW:
8511 case BUILT_IN_SADDLL_OVERFLOW:
8512 case BUILT_IN_SSUB_OVERFLOW:
8513 case BUILT_IN_SSUBL_OVERFLOW:
8514 case BUILT_IN_SSUBLL_OVERFLOW:
8515 case BUILT_IN_SMUL_OVERFLOW:
8516 case BUILT_IN_SMULL_OVERFLOW:
8517 case BUILT_IN_SMULLL_OVERFLOW:
8518 case BUILT_IN_UADD_OVERFLOW:
8519 case BUILT_IN_UADDL_OVERFLOW:
8520 case BUILT_IN_UADDLL_OVERFLOW:
8521 case BUILT_IN_USUB_OVERFLOW:
8522 case BUILT_IN_USUBL_OVERFLOW:
8523 case BUILT_IN_USUBLL_OVERFLOW:
8524 case BUILT_IN_UMUL_OVERFLOW:
8525 case BUILT_IN_UMULL_OVERFLOW:
8526 case BUILT_IN_UMULLL_OVERFLOW:
8527 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8529 default:
8530 break;
8532 return NULL_TREE;
8535 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8536 arguments. IGNORE is true if the result of the
8537 function call is ignored. This function returns NULL_TREE if no
8538 simplification was possible. */
8540 tree
8541 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8543 tree ret = NULL_TREE;
8545 switch (nargs)
8547 case 0:
8548 ret = fold_builtin_0 (loc, fndecl);
8549 break;
8550 case 1:
8551 ret = fold_builtin_1 (loc, fndecl, args[0]);
8552 break;
8553 case 2:
8554 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8555 break;
8556 case 3:
8557 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8558 break;
8559 default:
8560 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8561 break;
8563 if (ret)
8565 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8566 SET_EXPR_LOCATION (ret, loc);
8567 TREE_NO_WARNING (ret) = 1;
8568 return ret;
8570 return NULL_TREE;
8573 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8574 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8575 of arguments in ARGS to be omitted. OLDNARGS is the number of
8576 elements in ARGS. */
8578 static tree
8579 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8580 int skip, tree fndecl, int n, va_list newargs)
8582 int nargs = oldnargs - skip + n;
8583 tree *buffer;
8585 if (n > 0)
8587 int i, j;
8589 buffer = XALLOCAVEC (tree, nargs);
8590 for (i = 0; i < n; i++)
8591 buffer[i] = va_arg (newargs, tree);
8592 for (j = skip; j < oldnargs; j++, i++)
8593 buffer[i] = args[j];
8595 else
8596 buffer = args + skip;
8598 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8601 /* Return true if FNDECL shouldn't be folded right now.
8602 If a built-in function has an inline attribute always_inline
8603 wrapper, defer folding it after always_inline functions have
8604 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8605 might not be performed. */
8607 bool
8608 avoid_folding_inline_builtin (tree fndecl)
8610 return (DECL_DECLARED_INLINE_P (fndecl)
8611 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8612 && cfun
8613 && !cfun->always_inline_functions_inlined
8614 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8617 /* A wrapper function for builtin folding that prevents warnings for
8618 "statement without effect" and the like, caused by removing the
8619 call node earlier than the warning is generated. */
8621 tree
8622 fold_call_expr (location_t loc, tree exp, bool ignore)
8624 tree ret = NULL_TREE;
8625 tree fndecl = get_callee_fndecl (exp);
8626 if (fndecl
8627 && TREE_CODE (fndecl) == FUNCTION_DECL
8628 && DECL_BUILT_IN (fndecl)
8629 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8630 yet. Defer folding until we see all the arguments
8631 (after inlining). */
8632 && !CALL_EXPR_VA_ARG_PACK (exp))
8634 int nargs = call_expr_nargs (exp);
8636 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8637 instead last argument is __builtin_va_arg_pack (). Defer folding
8638 even in that case, until arguments are finalized. */
8639 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8641 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8642 if (fndecl2
8643 && TREE_CODE (fndecl2) == FUNCTION_DECL
8644 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8645 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8646 return NULL_TREE;
8649 if (avoid_folding_inline_builtin (fndecl))
8650 return NULL_TREE;
8652 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8653 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8654 CALL_EXPR_ARGP (exp), ignore);
8655 else
8657 tree *args = CALL_EXPR_ARGP (exp);
8658 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8659 if (ret)
8660 return ret;
8663 return NULL_TREE;
8666 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8667 N arguments are passed in the array ARGARRAY. Return a folded
8668 expression or NULL_TREE if no simplification was possible. */
8670 tree
8671 fold_builtin_call_array (location_t loc, tree,
8672 tree fn,
8673 int n,
8674 tree *argarray)
8676 if (TREE_CODE (fn) != ADDR_EXPR)
8677 return NULL_TREE;
8679 tree fndecl = TREE_OPERAND (fn, 0);
8680 if (TREE_CODE (fndecl) == FUNCTION_DECL
8681 && DECL_BUILT_IN (fndecl))
8683 /* If last argument is __builtin_va_arg_pack (), arguments to this
8684 function are not finalized yet. Defer folding until they are. */
8685 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8687 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8688 if (fndecl2
8689 && TREE_CODE (fndecl2) == FUNCTION_DECL
8690 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8691 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8692 return NULL_TREE;
8694 if (avoid_folding_inline_builtin (fndecl))
8695 return NULL_TREE;
8696 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8697 return targetm.fold_builtin (fndecl, n, argarray, false);
8698 else
8699 return fold_builtin_n (loc, fndecl, argarray, n, false);
8702 return NULL_TREE;
8705 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8706 along with N new arguments specified as the "..." parameters. SKIP
8707 is the number of arguments in EXP to be omitted. This function is used
8708 to do varargs-to-varargs transformations. */
8710 static tree
8711 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8713 va_list ap;
8714 tree t;
8716 va_start (ap, n);
8717 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8718 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8719 va_end (ap);
8721 return t;
8724 /* Validate a single argument ARG against a tree code CODE representing
8725 a type. */
8727 static bool
8728 validate_arg (const_tree arg, enum tree_code code)
8730 if (!arg)
8731 return false;
8732 else if (code == POINTER_TYPE)
8733 return POINTER_TYPE_P (TREE_TYPE (arg));
8734 else if (code == INTEGER_TYPE)
8735 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8736 return code == TREE_CODE (TREE_TYPE (arg));
8739 /* This function validates the types of a function call argument list
8740 against a specified list of tree_codes. If the last specifier is a 0,
8741 that represents an ellipses, otherwise the last specifier must be a
8742 VOID_TYPE.
8744 This is the GIMPLE version of validate_arglist. Eventually we want to
8745 completely convert builtins.c to work from GIMPLEs and the tree based
8746 validate_arglist will then be removed. */
8748 bool
8749 validate_gimple_arglist (const gcall *call, ...)
8751 enum tree_code code;
8752 bool res = 0;
8753 va_list ap;
8754 const_tree arg;
8755 size_t i;
8757 va_start (ap, call);
8758 i = 0;
8762 code = (enum tree_code) va_arg (ap, int);
8763 switch (code)
8765 case 0:
8766 /* This signifies an ellipses, any further arguments are all ok. */
8767 res = true;
8768 goto end;
8769 case VOID_TYPE:
8770 /* This signifies an endlink, if no arguments remain, return
8771 true, otherwise return false. */
8772 res = (i == gimple_call_num_args (call));
8773 goto end;
8774 default:
8775 /* If no parameters remain or the parameter's code does not
8776 match the specified code, return false. Otherwise continue
8777 checking any remaining arguments. */
8778 arg = gimple_call_arg (call, i++);
8779 if (!validate_arg (arg, code))
8780 goto end;
8781 break;
8784 while (1);
8786 /* We need gotos here since we can only have one VA_CLOSE in a
8787 function. */
8788 end: ;
8789 va_end (ap);
8791 return res;
8794 /* Default target-specific builtin expander that does nothing. */
8797 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8798 rtx target ATTRIBUTE_UNUSED,
8799 rtx subtarget ATTRIBUTE_UNUSED,
8800 machine_mode mode ATTRIBUTE_UNUSED,
8801 int ignore ATTRIBUTE_UNUSED)
8803 return NULL_RTX;
8806 /* Returns true is EXP represents data that would potentially reside
8807 in a readonly section. */
8809 bool
8810 readonly_data_expr (tree exp)
8812 STRIP_NOPS (exp);
8814 if (TREE_CODE (exp) != ADDR_EXPR)
8815 return false;
8817 exp = get_base_address (TREE_OPERAND (exp, 0));
8818 if (!exp)
8819 return false;
8821 /* Make sure we call decl_readonly_section only for trees it
8822 can handle (since it returns true for everything it doesn't
8823 understand). */
8824 if (TREE_CODE (exp) == STRING_CST
8825 || TREE_CODE (exp) == CONSTRUCTOR
8826 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8827 return decl_readonly_section (exp, 0);
8828 else
8829 return false;
8832 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8833 to the call, and TYPE is its return type.
8835 Return NULL_TREE if no simplification was possible, otherwise return the
8836 simplified form of the call as a tree.
8838 The simplified form may be a constant or other expression which
8839 computes the same value, but in a more efficient manner (including
8840 calls to other builtin functions).
8842 The call may contain arguments which need to be evaluated, but
8843 which are not useful to determine the result of the call. In
8844 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8845 COMPOUND_EXPR will be an argument which must be evaluated.
8846 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8847 COMPOUND_EXPR in the chain will contain the tree for the simplified
8848 form of the builtin function call. */
8850 static tree
8851 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8853 if (!validate_arg (s1, POINTER_TYPE)
8854 || !validate_arg (s2, POINTER_TYPE))
8855 return NULL_TREE;
8856 else
8858 tree fn;
8859 const char *p1, *p2;
8861 p2 = c_getstr (s2);
8862 if (p2 == NULL)
8863 return NULL_TREE;
8865 p1 = c_getstr (s1);
8866 if (p1 != NULL)
8868 const char *r = strstr (p1, p2);
8869 tree tem;
8871 if (r == NULL)
8872 return build_int_cst (TREE_TYPE (s1), 0);
8874 /* Return an offset into the constant string argument. */
8875 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8876 return fold_convert_loc (loc, type, tem);
8879 /* The argument is const char *, and the result is char *, so we need
8880 a type conversion here to avoid a warning. */
8881 if (p2[0] == '\0')
8882 return fold_convert_loc (loc, type, s1);
8884 if (p2[1] != '\0')
8885 return NULL_TREE;
8887 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8888 if (!fn)
8889 return NULL_TREE;
8891 /* New argument list transforming strstr(s1, s2) to
8892 strchr(s1, s2[0]). */
8893 return build_call_expr_loc (loc, fn, 2, s1,
8894 build_int_cst (integer_type_node, p2[0]));
8898 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8899 the call, and TYPE is its return type.
8901 Return NULL_TREE if no simplification was possible, otherwise return the
8902 simplified form of the call as a tree.
8904 The simplified form may be a constant or other expression which
8905 computes the same value, but in a more efficient manner (including
8906 calls to other builtin functions).
8908 The call may contain arguments which need to be evaluated, but
8909 which are not useful to determine the result of the call. In
8910 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8911 COMPOUND_EXPR will be an argument which must be evaluated.
8912 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8913 COMPOUND_EXPR in the chain will contain the tree for the simplified
8914 form of the builtin function call. */
8916 static tree
8917 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8919 if (!validate_arg (s1, POINTER_TYPE)
8920 || !validate_arg (s2, INTEGER_TYPE))
8921 return NULL_TREE;
8922 else
8924 const char *p1;
8926 if (TREE_CODE (s2) != INTEGER_CST)
8927 return NULL_TREE;
8929 p1 = c_getstr (s1);
8930 if (p1 != NULL)
8932 char c;
8933 const char *r;
8934 tree tem;
8936 if (target_char_cast (s2, &c))
8937 return NULL_TREE;
8939 r = strchr (p1, c);
8941 if (r == NULL)
8942 return build_int_cst (TREE_TYPE (s1), 0);
8944 /* Return an offset into the constant string argument. */
8945 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8946 return fold_convert_loc (loc, type, tem);
8948 return NULL_TREE;
8952 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8953 the call, and TYPE is its return type.
8955 Return NULL_TREE if no simplification was possible, otherwise return the
8956 simplified form of the call as a tree.
8958 The simplified form may be a constant or other expression which
8959 computes the same value, but in a more efficient manner (including
8960 calls to other builtin functions).
8962 The call may contain arguments which need to be evaluated, but
8963 which are not useful to determine the result of the call. In
8964 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8965 COMPOUND_EXPR will be an argument which must be evaluated.
8966 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8967 COMPOUND_EXPR in the chain will contain the tree for the simplified
8968 form of the builtin function call. */
8970 static tree
8971 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8973 if (!validate_arg (s1, POINTER_TYPE)
8974 || !validate_arg (s2, INTEGER_TYPE))
8975 return NULL_TREE;
8976 else
8978 tree fn;
8979 const char *p1;
8981 if (TREE_CODE (s2) != INTEGER_CST)
8982 return NULL_TREE;
8984 p1 = c_getstr (s1);
8985 if (p1 != NULL)
8987 char c;
8988 const char *r;
8989 tree tem;
8991 if (target_char_cast (s2, &c))
8992 return NULL_TREE;
8994 r = strrchr (p1, c);
8996 if (r == NULL)
8997 return build_int_cst (TREE_TYPE (s1), 0);
8999 /* Return an offset into the constant string argument. */
9000 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9001 return fold_convert_loc (loc, type, tem);
9004 if (! integer_zerop (s2))
9005 return NULL_TREE;
9007 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9008 if (!fn)
9009 return NULL_TREE;
9011 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9012 return build_call_expr_loc (loc, fn, 2, s1, s2);
9016 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9017 to the call, and TYPE is its return type.
9019 Return NULL_TREE if no simplification was possible, otherwise return the
9020 simplified form of the call as a tree.
9022 The simplified form may be a constant or other expression which
9023 computes the same value, but in a more efficient manner (including
9024 calls to other builtin functions).
9026 The call may contain arguments which need to be evaluated, but
9027 which are not useful to determine the result of the call. In
9028 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9029 COMPOUND_EXPR will be an argument which must be evaluated.
9030 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9031 COMPOUND_EXPR in the chain will contain the tree for the simplified
9032 form of the builtin function call. */
9034 static tree
9035 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9037 if (!validate_arg (s1, POINTER_TYPE)
9038 || !validate_arg (s2, POINTER_TYPE))
9039 return NULL_TREE;
9040 else
9042 tree fn;
9043 const char *p1, *p2;
9045 p2 = c_getstr (s2);
9046 if (p2 == NULL)
9047 return NULL_TREE;
9049 p1 = c_getstr (s1);
9050 if (p1 != NULL)
9052 const char *r = strpbrk (p1, p2);
9053 tree tem;
9055 if (r == NULL)
9056 return build_int_cst (TREE_TYPE (s1), 0);
9058 /* Return an offset into the constant string argument. */
9059 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9060 return fold_convert_loc (loc, type, tem);
9063 if (p2[0] == '\0')
9064 /* strpbrk(x, "") == NULL.
9065 Evaluate and ignore s1 in case it had side-effects. */
9066 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9068 if (p2[1] != '\0')
9069 return NULL_TREE; /* Really call strpbrk. */
9071 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9072 if (!fn)
9073 return NULL_TREE;
9075 /* New argument list transforming strpbrk(s1, s2) to
9076 strchr(s1, s2[0]). */
9077 return build_call_expr_loc (loc, fn, 2, s1,
9078 build_int_cst (integer_type_node, p2[0]));
9082 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9083 to the call.
9085 Return NULL_TREE if no simplification was possible, otherwise return the
9086 simplified form of the call as a tree.
9088 The simplified form may be a constant or other expression which
9089 computes the same value, but in a more efficient manner (including
9090 calls to other builtin functions).
9092 The call may contain arguments which need to be evaluated, but
9093 which are not useful to determine the result of the call. In
9094 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9095 COMPOUND_EXPR will be an argument which must be evaluated.
9096 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9097 COMPOUND_EXPR in the chain will contain the tree for the simplified
9098 form of the builtin function call. */
9100 static tree
9101 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9103 if (!validate_arg (s1, POINTER_TYPE)
9104 || !validate_arg (s2, POINTER_TYPE))
9105 return NULL_TREE;
9106 else
9108 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9110 /* If either argument is "", return NULL_TREE. */
9111 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9112 /* Evaluate and ignore both arguments in case either one has
9113 side-effects. */
9114 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9115 s1, s2);
9116 return NULL_TREE;
9120 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9121 to the call.
9123 Return NULL_TREE if no simplification was possible, otherwise return the
9124 simplified form of the call as a tree.
9126 The simplified form may be a constant or other expression which
9127 computes the same value, but in a more efficient manner (including
9128 calls to other builtin functions).
9130 The call may contain arguments which need to be evaluated, but
9131 which are not useful to determine the result of the call. In
9132 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9133 COMPOUND_EXPR will be an argument which must be evaluated.
9134 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9135 COMPOUND_EXPR in the chain will contain the tree for the simplified
9136 form of the builtin function call. */
9138 static tree
9139 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9141 if (!validate_arg (s1, POINTER_TYPE)
9142 || !validate_arg (s2, POINTER_TYPE))
9143 return NULL_TREE;
9144 else
9146 /* If the first argument is "", return NULL_TREE. */
9147 const char *p1 = c_getstr (s1);
9148 if (p1 && *p1 == '\0')
9150 /* Evaluate and ignore argument s2 in case it has
9151 side-effects. */
9152 return omit_one_operand_loc (loc, size_type_node,
9153 size_zero_node, s2);
9156 /* If the second argument is "", return __builtin_strlen(s1). */
9157 const char *p2 = c_getstr (s2);
9158 if (p2 && *p2 == '\0')
9160 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9162 /* If the replacement _DECL isn't initialized, don't do the
9163 transformation. */
9164 if (!fn)
9165 return NULL_TREE;
9167 return build_call_expr_loc (loc, fn, 1, s1);
9169 return NULL_TREE;
9173 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9174 produced. False otherwise. This is done so that we don't output the error
9175 or warning twice or three times. */
9177 bool
9178 fold_builtin_next_arg (tree exp, bool va_start_p)
9180 tree fntype = TREE_TYPE (current_function_decl);
9181 int nargs = call_expr_nargs (exp);
9182 tree arg;
9183 /* There is good chance the current input_location points inside the
9184 definition of the va_start macro (perhaps on the token for
9185 builtin) in a system header, so warnings will not be emitted.
9186 Use the location in real source code. */
9187 source_location current_location =
9188 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9189 NULL);
9191 if (!stdarg_p (fntype))
9193 error ("%<va_start%> used in function with fixed args");
9194 return true;
9197 if (va_start_p)
9199 if (va_start_p && (nargs != 2))
9201 error ("wrong number of arguments to function %<va_start%>");
9202 return true;
9204 arg = CALL_EXPR_ARG (exp, 1);
9206 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9207 when we checked the arguments and if needed issued a warning. */
9208 else
9210 if (nargs == 0)
9212 /* Evidently an out of date version of <stdarg.h>; can't validate
9213 va_start's second argument, but can still work as intended. */
9214 warning_at (current_location,
9215 OPT_Wvarargs,
9216 "%<__builtin_next_arg%> called without an argument");
9217 return true;
9219 else if (nargs > 1)
9221 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9222 return true;
9224 arg = CALL_EXPR_ARG (exp, 0);
9227 if (TREE_CODE (arg) == SSA_NAME)
9228 arg = SSA_NAME_VAR (arg);
9230 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9231 or __builtin_next_arg (0) the first time we see it, after checking
9232 the arguments and if needed issuing a warning. */
9233 if (!integer_zerop (arg))
9235 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9237 /* Strip off all nops for the sake of the comparison. This
9238 is not quite the same as STRIP_NOPS. It does more.
9239 We must also strip off INDIRECT_EXPR for C++ reference
9240 parameters. */
9241 while (CONVERT_EXPR_P (arg)
9242 || TREE_CODE (arg) == INDIRECT_REF)
9243 arg = TREE_OPERAND (arg, 0);
9244 if (arg != last_parm)
9246 /* FIXME: Sometimes with the tree optimizers we can get the
9247 not the last argument even though the user used the last
9248 argument. We just warn and set the arg to be the last
9249 argument so that we will get wrong-code because of
9250 it. */
9251 warning_at (current_location,
9252 OPT_Wvarargs,
9253 "second parameter of %<va_start%> not last named argument");
9256 /* Undefined by C99 7.15.1.4p4 (va_start):
9257 "If the parameter parmN is declared with the register storage
9258 class, with a function or array type, or with a type that is
9259 not compatible with the type that results after application of
9260 the default argument promotions, the behavior is undefined."
9262 else if (DECL_REGISTER (arg))
9264 warning_at (current_location,
9265 OPT_Wvarargs,
9266 "undefined behavior when second parameter of "
9267 "%<va_start%> is declared with %<register%> storage");
9270 /* We want to verify the second parameter just once before the tree
9271 optimizers are run and then avoid keeping it in the tree,
9272 as otherwise we could warn even for correct code like:
9273 void foo (int i, ...)
9274 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9275 if (va_start_p)
9276 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9277 else
9278 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9280 return false;
9284 /* Expand a call EXP to __builtin_object_size. */
9286 static rtx
9287 expand_builtin_object_size (tree exp)
9289 tree ost;
9290 int object_size_type;
9291 tree fndecl = get_callee_fndecl (exp);
9293 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9295 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9296 exp, fndecl);
9297 expand_builtin_trap ();
9298 return const0_rtx;
9301 ost = CALL_EXPR_ARG (exp, 1);
9302 STRIP_NOPS (ost);
9304 if (TREE_CODE (ost) != INTEGER_CST
9305 || tree_int_cst_sgn (ost) < 0
9306 || compare_tree_int (ost, 3) > 0)
9308 error ("%Klast argument of %D is not integer constant between 0 and 3",
9309 exp, fndecl);
9310 expand_builtin_trap ();
9311 return const0_rtx;
9314 object_size_type = tree_to_shwi (ost);
9316 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9319 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9320 FCODE is the BUILT_IN_* to use.
9321 Return NULL_RTX if we failed; the caller should emit a normal call,
9322 otherwise try to get the result in TARGET, if convenient (and in
9323 mode MODE if that's convenient). */
9325 static rtx
9326 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9327 enum built_in_function fcode)
9329 tree dest, src, len, size;
9331 if (!validate_arglist (exp,
9332 POINTER_TYPE,
9333 fcode == BUILT_IN_MEMSET_CHK
9334 ? INTEGER_TYPE : POINTER_TYPE,
9335 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9336 return NULL_RTX;
9338 dest = CALL_EXPR_ARG (exp, 0);
9339 src = CALL_EXPR_ARG (exp, 1);
9340 len = CALL_EXPR_ARG (exp, 2);
9341 size = CALL_EXPR_ARG (exp, 3);
9343 if (! tree_fits_uhwi_p (size))
9344 return NULL_RTX;
9346 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9348 tree fn;
9350 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9352 warning_at (tree_nonartificial_location (exp),
9353 0, "%Kcall to %D will always overflow destination buffer",
9354 exp, get_callee_fndecl (exp));
9355 return NULL_RTX;
9358 fn = NULL_TREE;
9359 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9360 mem{cpy,pcpy,move,set} is available. */
9361 switch (fcode)
9363 case BUILT_IN_MEMCPY_CHK:
9364 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9365 break;
9366 case BUILT_IN_MEMPCPY_CHK:
9367 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9368 break;
9369 case BUILT_IN_MEMMOVE_CHK:
9370 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9371 break;
9372 case BUILT_IN_MEMSET_CHK:
9373 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9374 break;
9375 default:
9376 break;
9379 if (! fn)
9380 return NULL_RTX;
9382 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9383 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9384 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9385 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9387 else if (fcode == BUILT_IN_MEMSET_CHK)
9388 return NULL_RTX;
9389 else
9391 unsigned int dest_align = get_pointer_alignment (dest);
9393 /* If DEST is not a pointer type, call the normal function. */
9394 if (dest_align == 0)
9395 return NULL_RTX;
9397 /* If SRC and DEST are the same (and not volatile), do nothing. */
9398 if (operand_equal_p (src, dest, 0))
9400 tree expr;
9402 if (fcode != BUILT_IN_MEMPCPY_CHK)
9404 /* Evaluate and ignore LEN in case it has side-effects. */
9405 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9406 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9409 expr = fold_build_pointer_plus (dest, len);
9410 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9413 /* __memmove_chk special case. */
9414 if (fcode == BUILT_IN_MEMMOVE_CHK)
9416 unsigned int src_align = get_pointer_alignment (src);
9418 if (src_align == 0)
9419 return NULL_RTX;
9421 /* If src is categorized for a readonly section we can use
9422 normal __memcpy_chk. */
9423 if (readonly_data_expr (src))
9425 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9426 if (!fn)
9427 return NULL_RTX;
9428 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9429 dest, src, len, size);
9430 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9431 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9432 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9435 return NULL_RTX;
9439 /* Emit warning if a buffer overflow is detected at compile time. */
9441 static void
9442 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9444 int is_strlen = 0;
9445 tree len, size;
9446 location_t loc = tree_nonartificial_location (exp);
9448 switch (fcode)
9450 case BUILT_IN_STRCPY_CHK:
9451 case BUILT_IN_STPCPY_CHK:
9452 /* For __strcat_chk the warning will be emitted only if overflowing
9453 by at least strlen (dest) + 1 bytes. */
9454 case BUILT_IN_STRCAT_CHK:
9455 len = CALL_EXPR_ARG (exp, 1);
9456 size = CALL_EXPR_ARG (exp, 2);
9457 is_strlen = 1;
9458 break;
9459 case BUILT_IN_STRNCAT_CHK:
9460 case BUILT_IN_STRNCPY_CHK:
9461 case BUILT_IN_STPNCPY_CHK:
9462 len = CALL_EXPR_ARG (exp, 2);
9463 size = CALL_EXPR_ARG (exp, 3);
9464 break;
9465 case BUILT_IN_SNPRINTF_CHK:
9466 case BUILT_IN_VSNPRINTF_CHK:
9467 len = CALL_EXPR_ARG (exp, 1);
9468 size = CALL_EXPR_ARG (exp, 3);
9469 break;
9470 default:
9471 gcc_unreachable ();
9474 if (!len || !size)
9475 return;
9477 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9478 return;
9480 if (is_strlen)
9482 len = c_strlen (len, 1);
9483 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9484 return;
9486 else if (fcode == BUILT_IN_STRNCAT_CHK)
9488 tree src = CALL_EXPR_ARG (exp, 1);
9489 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9490 return;
9491 src = c_strlen (src, 1);
9492 if (! src || ! tree_fits_uhwi_p (src))
9494 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9495 exp, get_callee_fndecl (exp));
9496 return;
9498 else if (tree_int_cst_lt (src, size))
9499 return;
9501 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9502 return;
9504 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9505 exp, get_callee_fndecl (exp));
9508 /* Emit warning if a buffer overflow is detected at compile time
9509 in __sprintf_chk/__vsprintf_chk calls. */
9511 static void
9512 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9514 tree size, len, fmt;
9515 const char *fmt_str;
9516 int nargs = call_expr_nargs (exp);
9518 /* Verify the required arguments in the original call. */
9520 if (nargs < 4)
9521 return;
9522 size = CALL_EXPR_ARG (exp, 2);
9523 fmt = CALL_EXPR_ARG (exp, 3);
9525 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9526 return;
9528 /* Check whether the format is a literal string constant. */
9529 fmt_str = c_getstr (fmt);
9530 if (fmt_str == NULL)
9531 return;
9533 if (!init_target_chars ())
9534 return;
9536 /* If the format doesn't contain % args or %%, we know its size. */
9537 if (strchr (fmt_str, target_percent) == 0)
9538 len = build_int_cstu (size_type_node, strlen (fmt_str));
9539 /* If the format is "%s" and first ... argument is a string literal,
9540 we know it too. */
9541 else if (fcode == BUILT_IN_SPRINTF_CHK
9542 && strcmp (fmt_str, target_percent_s) == 0)
9544 tree arg;
9546 if (nargs < 5)
9547 return;
9548 arg = CALL_EXPR_ARG (exp, 4);
9549 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9550 return;
9552 len = c_strlen (arg, 1);
9553 if (!len || ! tree_fits_uhwi_p (len))
9554 return;
9556 else
9557 return;
9559 if (! tree_int_cst_lt (len, size))
9560 warning_at (tree_nonartificial_location (exp),
9561 0, "%Kcall to %D will always overflow destination buffer",
9562 exp, get_callee_fndecl (exp));
9565 /* Emit warning if a free is called with address of a variable. */
9567 static void
9568 maybe_emit_free_warning (tree exp)
9570 tree arg = CALL_EXPR_ARG (exp, 0);
9572 STRIP_NOPS (arg);
9573 if (TREE_CODE (arg) != ADDR_EXPR)
9574 return;
9576 arg = get_base_address (TREE_OPERAND (arg, 0));
9577 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9578 return;
9580 if (SSA_VAR_P (arg))
9581 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9582 "%Kattempt to free a non-heap object %qD", exp, arg);
9583 else
9584 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9585 "%Kattempt to free a non-heap object", exp);
9588 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9589 if possible. */
9591 static tree
9592 fold_builtin_object_size (tree ptr, tree ost)
9594 unsigned HOST_WIDE_INT bytes;
9595 int object_size_type;
9597 if (!validate_arg (ptr, POINTER_TYPE)
9598 || !validate_arg (ost, INTEGER_TYPE))
9599 return NULL_TREE;
9601 STRIP_NOPS (ost);
9603 if (TREE_CODE (ost) != INTEGER_CST
9604 || tree_int_cst_sgn (ost) < 0
9605 || compare_tree_int (ost, 3) > 0)
9606 return NULL_TREE;
9608 object_size_type = tree_to_shwi (ost);
9610 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9611 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9612 and (size_t) 0 for types 2 and 3. */
9613 if (TREE_SIDE_EFFECTS (ptr))
9614 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9616 if (TREE_CODE (ptr) == ADDR_EXPR)
9618 compute_builtin_object_size (ptr, object_size_type, &bytes);
9619 if (wi::fits_to_tree_p (bytes, size_type_node))
9620 return build_int_cstu (size_type_node, bytes);
9622 else if (TREE_CODE (ptr) == SSA_NAME)
9624 /* If object size is not known yet, delay folding until
9625 later. Maybe subsequent passes will help determining
9626 it. */
9627 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9628 && wi::fits_to_tree_p (bytes, size_type_node))
9629 return build_int_cstu (size_type_node, bytes);
9632 return NULL_TREE;
9635 /* Builtins with folding operations that operate on "..." arguments
9636 need special handling; we need to store the arguments in a convenient
9637 data structure before attempting any folding. Fortunately there are
9638 only a few builtins that fall into this category. FNDECL is the
9639 function, EXP is the CALL_EXPR for the call. */
9641 static tree
9642 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9644 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9645 tree ret = NULL_TREE;
9647 switch (fcode)
9649 case BUILT_IN_FPCLASSIFY:
9650 ret = fold_builtin_fpclassify (loc, args, nargs);
9651 break;
9653 default:
9654 break;
9656 if (ret)
9658 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9659 SET_EXPR_LOCATION (ret, loc);
9660 TREE_NO_WARNING (ret) = 1;
9661 return ret;
9663 return NULL_TREE;
9666 /* Initialize format string characters in the target charset. */
9668 bool
9669 init_target_chars (void)
9671 static bool init;
9672 if (!init)
9674 target_newline = lang_hooks.to_target_charset ('\n');
9675 target_percent = lang_hooks.to_target_charset ('%');
9676 target_c = lang_hooks.to_target_charset ('c');
9677 target_s = lang_hooks.to_target_charset ('s');
9678 if (target_newline == 0 || target_percent == 0 || target_c == 0
9679 || target_s == 0)
9680 return false;
9682 target_percent_c[0] = target_percent;
9683 target_percent_c[1] = target_c;
9684 target_percent_c[2] = '\0';
9686 target_percent_s[0] = target_percent;
9687 target_percent_s[1] = target_s;
9688 target_percent_s[2] = '\0';
9690 target_percent_s_newline[0] = target_percent;
9691 target_percent_s_newline[1] = target_s;
9692 target_percent_s_newline[2] = target_newline;
9693 target_percent_s_newline[3] = '\0';
9695 init = true;
9697 return true;
9700 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9701 and no overflow/underflow occurred. INEXACT is true if M was not
9702 exactly calculated. TYPE is the tree type for the result. This
9703 function assumes that you cleared the MPFR flags and then
9704 calculated M to see if anything subsequently set a flag prior to
9705 entering this function. Return NULL_TREE if any checks fail. */
9707 static tree
9708 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9710 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9711 overflow/underflow occurred. If -frounding-math, proceed iff the
9712 result of calling FUNC was exact. */
9713 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9714 && (!flag_rounding_math || !inexact))
9716 REAL_VALUE_TYPE rr;
9718 real_from_mpfr (&rr, m, type, GMP_RNDN);
9719 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9720 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9721 but the mpft_t is not, then we underflowed in the
9722 conversion. */
9723 if (real_isfinite (&rr)
9724 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9726 REAL_VALUE_TYPE rmode;
9728 real_convert (&rmode, TYPE_MODE (type), &rr);
9729 /* Proceed iff the specified mode can hold the value. */
9730 if (real_identical (&rmode, &rr))
9731 return build_real (type, rmode);
9734 return NULL_TREE;
9737 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9738 number and no overflow/underflow occurred. INEXACT is true if M
9739 was not exactly calculated. TYPE is the tree type for the result.
9740 This function assumes that you cleared the MPFR flags and then
9741 calculated M to see if anything subsequently set a flag prior to
9742 entering this function. Return NULL_TREE if any checks fail, if
9743 FORCE_CONVERT is true, then bypass the checks. */
9745 static tree
9746 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9748 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9749 overflow/underflow occurred. If -frounding-math, proceed iff the
9750 result of calling FUNC was exact. */
9751 if (force_convert
9752 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9753 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9754 && (!flag_rounding_math || !inexact)))
9756 REAL_VALUE_TYPE re, im;
9758 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9759 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9760 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9761 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9762 but the mpft_t is not, then we underflowed in the
9763 conversion. */
9764 if (force_convert
9765 || (real_isfinite (&re) && real_isfinite (&im)
9766 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9767 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9769 REAL_VALUE_TYPE re_mode, im_mode;
9771 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9772 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9773 /* Proceed iff the specified mode can hold the value. */
9774 if (force_convert
9775 || (real_identical (&re_mode, &re)
9776 && real_identical (&im_mode, &im)))
9777 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9778 build_real (TREE_TYPE (type), im_mode));
9781 return NULL_TREE;
9784 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9785 the pointer *(ARG_QUO) and return the result. The type is taken
9786 from the type of ARG0 and is used for setting the precision of the
9787 calculation and results. */
9789 static tree
9790 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9792 tree const type = TREE_TYPE (arg0);
9793 tree result = NULL_TREE;
9795 STRIP_NOPS (arg0);
9796 STRIP_NOPS (arg1);
9798 /* To proceed, MPFR must exactly represent the target floating point
9799 format, which only happens when the target base equals two. */
9800 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9801 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9802 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9804 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9805 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9807 if (real_isfinite (ra0) && real_isfinite (ra1))
9809 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9810 const int prec = fmt->p;
9811 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9812 tree result_rem;
9813 long integer_quo;
9814 mpfr_t m0, m1;
9816 mpfr_inits2 (prec, m0, m1, NULL);
9817 mpfr_from_real (m0, ra0, GMP_RNDN);
9818 mpfr_from_real (m1, ra1, GMP_RNDN);
9819 mpfr_clear_flags ();
9820 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9821 /* Remquo is independent of the rounding mode, so pass
9822 inexact=0 to do_mpfr_ckconv(). */
9823 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9824 mpfr_clears (m0, m1, NULL);
9825 if (result_rem)
9827 /* MPFR calculates quo in the host's long so it may
9828 return more bits in quo than the target int can hold
9829 if sizeof(host long) > sizeof(target int). This can
9830 happen even for native compilers in LP64 mode. In
9831 these cases, modulo the quo value with the largest
9832 number that the target int can hold while leaving one
9833 bit for the sign. */
9834 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9835 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9837 /* Dereference the quo pointer argument. */
9838 arg_quo = build_fold_indirect_ref (arg_quo);
9839 /* Proceed iff a valid pointer type was passed in. */
9840 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9842 /* Set the value. */
9843 tree result_quo
9844 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9845 build_int_cst (TREE_TYPE (arg_quo),
9846 integer_quo));
9847 TREE_SIDE_EFFECTS (result_quo) = 1;
9848 /* Combine the quo assignment with the rem. */
9849 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9850 result_quo, result_rem));
9855 return result;
9858 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9859 resulting value as a tree with type TYPE. The mpfr precision is
9860 set to the precision of TYPE. We assume that this mpfr function
9861 returns zero if the result could be calculated exactly within the
9862 requested precision. In addition, the integer pointer represented
9863 by ARG_SG will be dereferenced and set to the appropriate signgam
9864 (-1,1) value. */
9866 static tree
9867 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9869 tree result = NULL_TREE;
9871 STRIP_NOPS (arg);
9873 /* To proceed, MPFR must exactly represent the target floating point
9874 format, which only happens when the target base equals two. Also
9875 verify ARG is a constant and that ARG_SG is an int pointer. */
9876 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9877 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9878 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9879 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9881 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9883 /* In addition to NaN and Inf, the argument cannot be zero or a
9884 negative integer. */
9885 if (real_isfinite (ra)
9886 && ra->cl != rvc_zero
9887 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9889 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9890 const int prec = fmt->p;
9891 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9892 int inexact, sg;
9893 mpfr_t m;
9894 tree result_lg;
9896 mpfr_init2 (m, prec);
9897 mpfr_from_real (m, ra, GMP_RNDN);
9898 mpfr_clear_flags ();
9899 inexact = mpfr_lgamma (m, &sg, m, rnd);
9900 result_lg = do_mpfr_ckconv (m, type, inexact);
9901 mpfr_clear (m);
9902 if (result_lg)
9904 tree result_sg;
9906 /* Dereference the arg_sg pointer argument. */
9907 arg_sg = build_fold_indirect_ref (arg_sg);
9908 /* Assign the signgam value into *arg_sg. */
9909 result_sg = fold_build2 (MODIFY_EXPR,
9910 TREE_TYPE (arg_sg), arg_sg,
9911 build_int_cst (TREE_TYPE (arg_sg), sg));
9912 TREE_SIDE_EFFECTS (result_sg) = 1;
9913 /* Combine the signgam assignment with the lgamma result. */
9914 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9915 result_sg, result_lg));
9920 return result;
9923 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9924 mpc function FUNC on it and return the resulting value as a tree
9925 with type TYPE. The mpfr precision is set to the precision of
9926 TYPE. We assume that function FUNC returns zero if the result
9927 could be calculated exactly within the requested precision. If
9928 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9929 in the arguments and/or results. */
9931 tree
9932 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9933 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9935 tree result = NULL_TREE;
9937 STRIP_NOPS (arg0);
9938 STRIP_NOPS (arg1);
9940 /* To proceed, MPFR must exactly represent the target floating point
9941 format, which only happens when the target base equals two. */
9942 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9944 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9945 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9946 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9948 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9949 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9950 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9951 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9953 if (do_nonfinite
9954 || (real_isfinite (re0) && real_isfinite (im0)
9955 && real_isfinite (re1) && real_isfinite (im1)))
9957 const struct real_format *const fmt =
9958 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9959 const int prec = fmt->p;
9960 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9961 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9962 int inexact;
9963 mpc_t m0, m1;
9965 mpc_init2 (m0, prec);
9966 mpc_init2 (m1, prec);
9967 mpfr_from_real (mpc_realref (m0), re0, rnd);
9968 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9969 mpfr_from_real (mpc_realref (m1), re1, rnd);
9970 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9971 mpfr_clear_flags ();
9972 inexact = func (m0, m0, m1, crnd);
9973 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9974 mpc_clear (m0);
9975 mpc_clear (m1);
9979 return result;
9982 /* A wrapper function for builtin folding that prevents warnings for
9983 "statement without effect" and the like, caused by removing the
9984 call node earlier than the warning is generated. */
9986 tree
9987 fold_call_stmt (gcall *stmt, bool ignore)
9989 tree ret = NULL_TREE;
9990 tree fndecl = gimple_call_fndecl (stmt);
9991 location_t loc = gimple_location (stmt);
9992 if (fndecl
9993 && TREE_CODE (fndecl) == FUNCTION_DECL
9994 && DECL_BUILT_IN (fndecl)
9995 && !gimple_call_va_arg_pack_p (stmt))
9997 int nargs = gimple_call_num_args (stmt);
9998 tree *args = (nargs > 0
9999 ? gimple_call_arg_ptr (stmt, 0)
10000 : &error_mark_node);
10002 if (avoid_folding_inline_builtin (fndecl))
10003 return NULL_TREE;
10004 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10006 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10008 else
10010 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10011 if (ret)
10013 /* Propagate location information from original call to
10014 expansion of builtin. Otherwise things like
10015 maybe_emit_chk_warning, that operate on the expansion
10016 of a builtin, will use the wrong location information. */
10017 if (gimple_has_location (stmt))
10019 tree realret = ret;
10020 if (TREE_CODE (ret) == NOP_EXPR)
10021 realret = TREE_OPERAND (ret, 0);
10022 if (CAN_HAVE_LOCATION_P (realret)
10023 && !EXPR_HAS_LOCATION (realret))
10024 SET_EXPR_LOCATION (realret, loc);
10025 return realret;
10027 return ret;
10031 return NULL_TREE;
10034 /* Look up the function in builtin_decl that corresponds to DECL
10035 and set ASMSPEC as its user assembler name. DECL must be a
10036 function decl that declares a builtin. */
10038 void
10039 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10041 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10042 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10043 && asmspec != 0);
10045 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10046 set_user_assembler_name (builtin, asmspec);
10048 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10049 && INT_TYPE_SIZE < BITS_PER_WORD)
10051 set_user_assembler_libfunc ("ffs", asmspec);
10052 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10053 "ffs");
10057 /* Return true if DECL is a builtin that expands to a constant or similarly
10058 simple code. */
10059 bool
10060 is_simple_builtin (tree decl)
10062 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10063 switch (DECL_FUNCTION_CODE (decl))
10065 /* Builtins that expand to constants. */
10066 case BUILT_IN_CONSTANT_P:
10067 case BUILT_IN_EXPECT:
10068 case BUILT_IN_OBJECT_SIZE:
10069 case BUILT_IN_UNREACHABLE:
10070 /* Simple register moves or loads from stack. */
10071 case BUILT_IN_ASSUME_ALIGNED:
10072 case BUILT_IN_RETURN_ADDRESS:
10073 case BUILT_IN_EXTRACT_RETURN_ADDR:
10074 case BUILT_IN_FROB_RETURN_ADDR:
10075 case BUILT_IN_RETURN:
10076 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10077 case BUILT_IN_FRAME_ADDRESS:
10078 case BUILT_IN_VA_END:
10079 case BUILT_IN_STACK_SAVE:
10080 case BUILT_IN_STACK_RESTORE:
10081 /* Exception state returns or moves registers around. */
10082 case BUILT_IN_EH_FILTER:
10083 case BUILT_IN_EH_POINTER:
10084 case BUILT_IN_EH_COPY_VALUES:
10085 return true;
10087 default:
10088 return false;
10091 return false;
10094 /* Return true if DECL is a builtin that is not expensive, i.e., they are
10095 most probably expanded inline into reasonably simple code. This is a
10096 superset of is_simple_builtin. */
10097 bool
10098 is_inexpensive_builtin (tree decl)
10100 if (!decl)
10101 return false;
10102 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10103 return true;
10104 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10105 switch (DECL_FUNCTION_CODE (decl))
10107 case BUILT_IN_ABS:
10108 case BUILT_IN_ALLOCA:
10109 case BUILT_IN_ALLOCA_WITH_ALIGN:
10110 case BUILT_IN_BSWAP16:
10111 case BUILT_IN_BSWAP32:
10112 case BUILT_IN_BSWAP64:
10113 case BUILT_IN_CLZ:
10114 case BUILT_IN_CLZIMAX:
10115 case BUILT_IN_CLZL:
10116 case BUILT_IN_CLZLL:
10117 case BUILT_IN_CTZ:
10118 case BUILT_IN_CTZIMAX:
10119 case BUILT_IN_CTZL:
10120 case BUILT_IN_CTZLL:
10121 case BUILT_IN_FFS:
10122 case BUILT_IN_FFSIMAX:
10123 case BUILT_IN_FFSL:
10124 case BUILT_IN_FFSLL:
10125 case BUILT_IN_IMAXABS:
10126 case BUILT_IN_FINITE:
10127 case BUILT_IN_FINITEF:
10128 case BUILT_IN_FINITEL:
10129 case BUILT_IN_FINITED32:
10130 case BUILT_IN_FINITED64:
10131 case BUILT_IN_FINITED128:
10132 case BUILT_IN_FPCLASSIFY:
10133 case BUILT_IN_ISFINITE:
10134 case BUILT_IN_ISINF_SIGN:
10135 case BUILT_IN_ISINF:
10136 case BUILT_IN_ISINFF:
10137 case BUILT_IN_ISINFL:
10138 case BUILT_IN_ISINFD32:
10139 case BUILT_IN_ISINFD64:
10140 case BUILT_IN_ISINFD128:
10141 case BUILT_IN_ISNAN:
10142 case BUILT_IN_ISNANF:
10143 case BUILT_IN_ISNANL:
10144 case BUILT_IN_ISNAND32:
10145 case BUILT_IN_ISNAND64:
10146 case BUILT_IN_ISNAND128:
10147 case BUILT_IN_ISNORMAL:
10148 case BUILT_IN_ISGREATER:
10149 case BUILT_IN_ISGREATEREQUAL:
10150 case BUILT_IN_ISLESS:
10151 case BUILT_IN_ISLESSEQUAL:
10152 case BUILT_IN_ISLESSGREATER:
10153 case BUILT_IN_ISUNORDERED:
10154 case BUILT_IN_VA_ARG_PACK:
10155 case BUILT_IN_VA_ARG_PACK_LEN:
10156 case BUILT_IN_VA_COPY:
10157 case BUILT_IN_TRAP:
10158 case BUILT_IN_SAVEREGS:
10159 case BUILT_IN_POPCOUNTL:
10160 case BUILT_IN_POPCOUNTLL:
10161 case BUILT_IN_POPCOUNTIMAX:
10162 case BUILT_IN_POPCOUNT:
10163 case BUILT_IN_PARITYL:
10164 case BUILT_IN_PARITYLL:
10165 case BUILT_IN_PARITYIMAX:
10166 case BUILT_IN_PARITY:
10167 case BUILT_IN_LABS:
10168 case BUILT_IN_LLABS:
10169 case BUILT_IN_PREFETCH:
10170 case BUILT_IN_ACC_ON_DEVICE:
10171 return true;
10173 default:
10174 return is_simple_builtin (decl);
10177 return false;