/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or is the name of a Cilk runtime keyword function when -fcilkplus
   is in effect.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
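
/* Illustrative examples (added, not from the original source):
   is_builtin_name ("__builtin_memcpy") and is_builtin_name ("__atomic_load_n")
   return true, while is_builtin_name ("memcpy") returns false.  */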
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
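/* Illustrative example (added, not from the original source): for an access
   to the byte at &foo + 6, where foo is known to be 8-byte aligned, M is
   64 bits and N is 48 bits: (address - 48 bits) is divisible by 64 bits,
   so *ALIGNP = 64 and *BITPOSP = 48.  */
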
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
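
/* Illustrative example (added, not from the original source): if
   get_object_alignment_1 reports align == 64 and bitpos == 48, then
   ptr & 63 == 48, and the strongest power-of-two alignment of the address
   itself is bitpos & -bitpos == 16 bits, i.e. 2 bytes.  */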
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a pointer, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
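
/* Illustrative examples (added, not from the original source): for the
   constant "hello" the result is ssize_int (5); for "foo\0bar" with a known
   constant offset of 4 the result is 3 (the length of "bar"); for "foo\0bar"
   with an unknown offset the result is NULL_TREE, since the internal zero
   byte makes the length depend on the offset.  */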
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
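
/* Illustrative example (added, not from the original source): on a
   little-endian target, c_readstr ("abcd", SImode) produces the constant
   0x64636261 ('a' in the least significant byte); on a big-endian target
   it produces 0x61626364.  Once a zero byte is seen, all remaining bytes
   are filled with zeros.  */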
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
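
/* Illustrative example (added, not from the original source): for an
   INTEGER_CST with value 65 this stores 'A' through *P and returns 0.
   It returns 1 (failure) when the value does not survive the round trip,
   e.g. 0x100 on a target whose char is wider than the host's 8-bit char.  */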
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
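
/* Illustrative example (added, not from the original source):
   __builtin_return_address (0) is expanded with COUNT == 0 and reads the
   return address of the current frame, while __builtin_frame_address (1)
   walks one step down the dynamic chain and yields the caller's frame
   address (possibly adjusted via FRAME_ADDR_RTX on targets that bias the
   frame pointer).  */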
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
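
/* Illustrative layout (added, not from the original source) of the
   __builtin_setjmp buffer as written by the code above, in word-sized
   slots:
     buf[0]   frame pointer (targetm.builtin_setjmp_frame_value)
     buf[1]   address of RECEIVER_LABEL
     buf[2..] machine-dependent stack save area.  */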
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if the const call expr argument iterator ITER has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
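
/* Illustrative example (added, not from the original source): a
   memcpy-style argument list is checked with
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE,
		       VOID_TYPE)
   which accepts exactly (void *, const void *, size_t), while a trailing 0
   specifier, as in validate_arglist (exp, POINTER_TYPE, 0), accepts any
   number of further arguments.  */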
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
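
/* Illustrative example (added, not from the original source):
   __builtin_prefetch (p, 1, 3) prefetches *P for a write with maximum
   temporal locality, and __builtin_prefetch (p) is equivalent to
   __builtin_prefetch (p, 0, 3) per the defaults above.  */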
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1661 /* Perform an untyped return. */
1663 static void
1664 expand_builtin_return (rtx result)
1666 int size, align, regno;
1667 machine_mode mode;
1668 rtx reg;
1669 rtx_insn *call_fusage = 0;
1671 result = convert_memory_address (Pmode, result);
1673 apply_result_size ();
1674 result = gen_rtx_MEM (BLKmode, result);
1676 if (targetm.have_untyped_return ())
1678 rtx vector = result_vector (0, result);
1679 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1680 emit_barrier ();
1681 return;
1684 /* Restore the return value and note that each value is used. */
1685 size = 0;
1686 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1687 if ((mode = apply_result_mode[regno]) != VOIDmode)
1689 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1690 if (size % align != 0)
1691 size = CEIL (size, align) * align;
1692 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1693 emit_move_insn (reg, adjust_address (result, mode, size));
1695 push_to_sequence (call_fusage);
1696 emit_use (reg);
1697 call_fusage = get_insns ();
1698 end_sequence ();
1699 size += GET_MODE_SIZE (mode);
1702 /* Put the USE insns before the return. */
1703 emit_insn (call_fusage);
1705 /* Return whatever value was restored by jumping directly to the end
1706 of the function. */
1707 expand_naked_return ();
1710 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1712 static enum type_class
1713 type_to_class (tree type)
1715 switch (TREE_CODE (type))
1717 case VOID_TYPE: return void_type_class;
1718 case INTEGER_TYPE: return integer_type_class;
1719 case ENUMERAL_TYPE: return enumeral_type_class;
1720 case BOOLEAN_TYPE: return boolean_type_class;
1721 case POINTER_TYPE: return pointer_type_class;
1722 case REFERENCE_TYPE: return reference_type_class;
1723 case OFFSET_TYPE: return offset_type_class;
1724 case REAL_TYPE: return real_type_class;
1725 case COMPLEX_TYPE: return complex_type_class;
1726 case FUNCTION_TYPE: return function_type_class;
1727 case METHOD_TYPE: return method_type_class;
1728 case RECORD_TYPE: return record_type_class;
1729 case UNION_TYPE:
1730 case QUAL_UNION_TYPE: return union_type_class;
1731 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1732 ? string_type_class : array_type_class);
1733 case LANG_TYPE: return lang_type_class;
1734 default: return no_type_class;
1738 /* Expand a call EXP to __builtin_classify_type. */
1740 static rtx
1741 expand_builtin_classify_type (tree exp)
1743 if (call_expr_nargs (exp))
1744 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1745 return GEN_INT (no_type_class);
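/* Illustrative sketch (not part of the original source): at the user level
   the mapping above means, for example,

       int i;  double d;  char *p;
       __builtin_classify_type (i)   evaluates to integer_type_class
       __builtin_classify_type (d)   evaluates to real_type_class
       __builtin_classify_type (p)   evaluates to pointer_type_class

   with the numeric values taken from enum type_class in typeclass.h.
   Type-generic macros (e.g. <tgmath.h> implementations) use the builtin
   this way to dispatch on the argument type at compile time.  */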
1748 /* This helper macro, meant to be used in mathfn_built_in below,
1749 determines which among a set of three builtin math functions is
1750 appropriate for a given type mode. The `F' and `L' cases are
1751 automatically generated from the `double' case. */
1752 #define CASE_MATHFN(MATHFN) \
1753 CASE_CFN_##MATHFN: \
1754 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1755 fcodel = BUILT_IN_##MATHFN##L ; break;
1756 /* Similar to above, but appends _R after any F/L suffix. */
1757 #define CASE_MATHFN_REENT(MATHFN) \
1758 case CFN_BUILT_IN_##MATHFN##_R: \
1759 case CFN_BUILT_IN_##MATHFN##F_R: \
1760 case CFN_BUILT_IN_##MATHFN##L_R: \
1761 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1762 fcodel = BUILT_IN_##MATHFN##L_R ; break;
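/* For reference (an illustrative expansion, not original source text),
   CASE_MATHFN (SQRT) expands to roughly

       CASE_CFN_SQRT:
         fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
         fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT is the case-cfn-macros.h macro covering the internal
   function and the double/float/long double built-in variants.  */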
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on function codes; it does not guarantee
1767 that the target actually has an implementation of the function. */
1769 static built_in_function
1770 mathfn_built_in_2 (tree type, combined_fn fn)
1772 built_in_function fcode, fcodef, fcodel;
1774 switch (fn)
1776 CASE_MATHFN (ACOS)
1777 CASE_MATHFN (ACOSH)
1778 CASE_MATHFN (ASIN)
1779 CASE_MATHFN (ASINH)
1780 CASE_MATHFN (ATAN)
1781 CASE_MATHFN (ATAN2)
1782 CASE_MATHFN (ATANH)
1783 CASE_MATHFN (CBRT)
1784 CASE_MATHFN (CEIL)
1785 CASE_MATHFN (CEXPI)
1786 CASE_MATHFN (COPYSIGN)
1787 CASE_MATHFN (COS)
1788 CASE_MATHFN (COSH)
1789 CASE_MATHFN (DREM)
1790 CASE_MATHFN (ERF)
1791 CASE_MATHFN (ERFC)
1792 CASE_MATHFN (EXP)
1793 CASE_MATHFN (EXP10)
1794 CASE_MATHFN (EXP2)
1795 CASE_MATHFN (EXPM1)
1796 CASE_MATHFN (FABS)
1797 CASE_MATHFN (FDIM)
1798 CASE_MATHFN (FLOOR)
1799 CASE_MATHFN (FMA)
1800 CASE_MATHFN (FMAX)
1801 CASE_MATHFN (FMIN)
1802 CASE_MATHFN (FMOD)
1803 CASE_MATHFN (FREXP)
1804 CASE_MATHFN (GAMMA)
1805 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1806 CASE_MATHFN (HUGE_VAL)
1807 CASE_MATHFN (HYPOT)
1808 CASE_MATHFN (ILOGB)
1809 CASE_MATHFN (ICEIL)
1810 CASE_MATHFN (IFLOOR)
1811 CASE_MATHFN (INF)
1812 CASE_MATHFN (IRINT)
1813 CASE_MATHFN (IROUND)
1814 CASE_MATHFN (ISINF)
1815 CASE_MATHFN (J0)
1816 CASE_MATHFN (J1)
1817 CASE_MATHFN (JN)
1818 CASE_MATHFN (LCEIL)
1819 CASE_MATHFN (LDEXP)
1820 CASE_MATHFN (LFLOOR)
1821 CASE_MATHFN (LGAMMA)
1822 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1823 CASE_MATHFN (LLCEIL)
1824 CASE_MATHFN (LLFLOOR)
1825 CASE_MATHFN (LLRINT)
1826 CASE_MATHFN (LLROUND)
1827 CASE_MATHFN (LOG)
1828 CASE_MATHFN (LOG10)
1829 CASE_MATHFN (LOG1P)
1830 CASE_MATHFN (LOG2)
1831 CASE_MATHFN (LOGB)
1832 CASE_MATHFN (LRINT)
1833 CASE_MATHFN (LROUND)
1834 CASE_MATHFN (MODF)
1835 CASE_MATHFN (NAN)
1836 CASE_MATHFN (NANS)
1837 CASE_MATHFN (NEARBYINT)
1838 CASE_MATHFN (NEXTAFTER)
1839 CASE_MATHFN (NEXTTOWARD)
1840 CASE_MATHFN (POW)
1841 CASE_MATHFN (POWI)
1842 CASE_MATHFN (POW10)
1843 CASE_MATHFN (REMAINDER)
1844 CASE_MATHFN (REMQUO)
1845 CASE_MATHFN (RINT)
1846 CASE_MATHFN (ROUND)
1847 CASE_MATHFN (SCALB)
1848 CASE_MATHFN (SCALBLN)
1849 CASE_MATHFN (SCALBN)
1850 CASE_MATHFN (SIGNBIT)
1851 CASE_MATHFN (SIGNIFICAND)
1852 CASE_MATHFN (SIN)
1853 CASE_MATHFN (SINCOS)
1854 CASE_MATHFN (SINH)
1855 CASE_MATHFN (SQRT)
1856 CASE_MATHFN (TAN)
1857 CASE_MATHFN (TANH)
1858 CASE_MATHFN (TGAMMA)
1859 CASE_MATHFN (TRUNC)
1860 CASE_MATHFN (Y0)
1861 CASE_MATHFN (Y1)
1862 CASE_MATHFN (YN)
1864 default:
1865 return END_BUILTINS;
1868 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1869 return fcode;
1870 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1871 return fcodef;
1872 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1873 return fcodel;
1874 else
1875 return END_BUILTINS;
1878 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1879 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1880 otherwise use the explicit declaration. If we can't do the conversion,
1881 return null. */
1883 static tree
1884 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1886 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1887 if (fcode2 == END_BUILTINS)
1888 return NULL_TREE;
1890 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1891 return NULL_TREE;
1893 return builtin_decl_explicit (fcode2);
1896 /* Like mathfn_built_in_1, but always use the implicit array. */
1898 tree
1899 mathfn_built_in (tree type, combined_fn fn)
1901 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1904 /* Like mathfn_built_in_1, but take a built_in_function and
1905 always use the implicit array. */
1907 tree
1908 mathfn_built_in (tree type, enum built_in_function fn)
1910 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
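/* Usage sketch (an assumption about a typical caller, not original source):
   to find the float counterpart of sqrt when folding sqrtf-style calls:

       tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);
       if (fn)
         ... fn is the implicit decl for BUILT_IN_SQRTF ...

   A null result means no suitable (implicitly usable) decl exists.  */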
1913 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1914 return its code, otherwise return IFN_LAST. Note that this function
1915 only tests whether the function is defined in internal-fn.def, not whether
1916 it is actually available on the target. */
1918 internal_fn
1919 associated_internal_fn (tree fndecl)
1921 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1922 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1923 switch (DECL_FUNCTION_CODE (fndecl))
1925 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1926 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1927 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1928 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1929 #include "internal-fn.def"
1931 CASE_FLT_FN (BUILT_IN_POW10):
1932 return IFN_EXP10;
1934 CASE_FLT_FN (BUILT_IN_DREM):
1935 return IFN_REMAINDER;
1937 CASE_FLT_FN (BUILT_IN_SCALBN):
1938 CASE_FLT_FN (BUILT_IN_SCALBLN):
1939 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1940 return IFN_LDEXP;
1941 return IFN_LAST;
1943 default:
1944 return IFN_LAST;
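/* Concrete examples (illustrative): for the sqrt/sqrtf/sqrtl decls this
   returns IFN_SQRT via the internal-fn.def include; for pow10 it returns
   IFN_EXP10 and for drem IFN_REMAINDER, per the special cases above; for
   scalbn it returns IFN_LDEXP only when the return type's radix is 2, and
   IFN_LAST otherwise.  */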
1948 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1949 on the current target by a call to an internal function, return the
1950 code of that internal function, otherwise return IFN_LAST. The caller
1951 is responsible for ensuring that any side-effects of the built-in
1952 call are dealt with correctly. E.g. if CALL sets errno, the caller
1953 must decide that the errno result isn't needed or make it available
1954 in some other way. */
1956 internal_fn
1957 replacement_internal_fn (gcall *call)
1959 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1961 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1962 if (ifn != IFN_LAST)
1964 tree_pair types = direct_internal_fn_types (ifn, call);
1965 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1966 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1967 return ifn;
1970 return IFN_LAST;
1973 /* Expand a call to the builtin ternary math functions (fma).
1974 Return NULL_RTX if a normal call should be emitted rather than expanding the
1975 function in-line. EXP is the expression that is a call to the builtin
1976 function; if convenient, the result should be placed in TARGET.
1977 SUBTARGET may be used as the target for computing one of EXP's
1978 operands. */
1980 static rtx
1981 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1983 optab builtin_optab;
1984 rtx op0, op1, op2, result;
1985 rtx_insn *insns;
1986 tree fndecl = get_callee_fndecl (exp);
1987 tree arg0, arg1, arg2;
1988 machine_mode mode;
1990 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1991 return NULL_RTX;
1993 arg0 = CALL_EXPR_ARG (exp, 0);
1994 arg1 = CALL_EXPR_ARG (exp, 1);
1995 arg2 = CALL_EXPR_ARG (exp, 2);
1997 switch (DECL_FUNCTION_CODE (fndecl))
1999 CASE_FLT_FN (BUILT_IN_FMA):
2000 builtin_optab = fma_optab; break;
2001 default:
2002 gcc_unreachable ();
2005 /* Make a suitable register to place result in. */
2006 mode = TYPE_MODE (TREE_TYPE (exp));
2008 /* Before working hard, check whether the instruction is available. */
2009 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2010 return NULL_RTX;
2012 result = gen_reg_rtx (mode);
2014 /* Always stabilize the argument list. */
2015 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2016 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2017 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2019 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2020 op1 = expand_normal (arg1);
2021 op2 = expand_normal (arg2);
2023 start_sequence ();
2025 /* Compute into RESULT.
2026 Set RESULT to wherever the result comes back. */
2027 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2028 result, 0);
2030 /* If we were unable to expand via the builtin, stop the sequence
2031 (without outputting the insns) and call the library function
2032 with the stabilized argument list. */
2033 if (result == 0)
2035 end_sequence ();
2036 return expand_call (exp, target, target == const0_rtx);
2039 /* Output the entire sequence. */
2040 insns = get_insns ();
2041 end_sequence ();
2042 emit_insn (insns);
2044 return result;
2047 /* Expand a call to the builtin sin and cos math functions.
2048 Return NULL_RTX if a normal call should be emitted rather than expanding the
2049 function in-line. EXP is the expression that is a call to the builtin
2050 function; if convenient, the result should be placed in TARGET.
2051 SUBTARGET may be used as the target for computing one of EXP's
2052 operands. */
2054 static rtx
2055 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2057 optab builtin_optab;
2058 rtx op0;
2059 rtx_insn *insns;
2060 tree fndecl = get_callee_fndecl (exp);
2061 machine_mode mode;
2062 tree arg;
2064 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2065 return NULL_RTX;
2067 arg = CALL_EXPR_ARG (exp, 0);
2069 switch (DECL_FUNCTION_CODE (fndecl))
2071 CASE_FLT_FN (BUILT_IN_SIN):
2072 CASE_FLT_FN (BUILT_IN_COS):
2073 builtin_optab = sincos_optab; break;
2074 default:
2075 gcc_unreachable ();
2078 /* Make a suitable register to place result in. */
2079 mode = TYPE_MODE (TREE_TYPE (exp));
2081 /* Check if the sincos insn is available; otherwise fall back
2082 to the sin or cos insn. */
2083 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2084 switch (DECL_FUNCTION_CODE (fndecl))
2086 CASE_FLT_FN (BUILT_IN_SIN):
2087 builtin_optab = sin_optab; break;
2088 CASE_FLT_FN (BUILT_IN_COS):
2089 builtin_optab = cos_optab; break;
2090 default:
2091 gcc_unreachable ();
2094 /* Before working hard, check whether the instruction is available. */
2095 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2097 rtx result = gen_reg_rtx (mode);
2099 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2100 need to expand the argument again. This way, we will not perform
2101 side-effects more than once. */
2102 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2104 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2106 start_sequence ();
2108 /* Compute into RESULT.
2109 Set RESULT to wherever the result comes back. */
2110 if (builtin_optab == sincos_optab)
2112 int ok;
2114 switch (DECL_FUNCTION_CODE (fndecl))
2116 CASE_FLT_FN (BUILT_IN_SIN):
2117 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2118 break;
2119 CASE_FLT_FN (BUILT_IN_COS):
2120 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2121 break;
2122 default:
2123 gcc_unreachable ();
2125 gcc_assert (ok);
2127 else
2128 result = expand_unop (mode, builtin_optab, op0, result, 0);
2130 if (result != 0)
2132 /* Output the entire sequence. */
2133 insns = get_insns ();
2134 end_sequence ();
2135 emit_insn (insns);
2136 return result;
2139 /* If we were unable to expand via the builtin, stop the sequence
2140 (without outputting the insns) and call the library function
2141 with the stabilized argument list. */
2142 end_sequence ();
2145 return expand_call (exp, target, target == const0_rtx);
2148 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2149 return an RTL instruction code that implements the functionality.
2150 If that isn't possible or available, return CODE_FOR_nothing. */
2152 static enum insn_code
2153 interclass_mathfn_icode (tree arg, tree fndecl)
2155 bool errno_set = false;
2156 optab builtin_optab = unknown_optab;
2157 machine_mode mode;
2159 switch (DECL_FUNCTION_CODE (fndecl))
2161 CASE_FLT_FN (BUILT_IN_ILOGB):
2162 errno_set = true; builtin_optab = ilogb_optab; break;
2163 CASE_FLT_FN (BUILT_IN_ISINF):
2164 builtin_optab = isinf_optab; break;
2165 case BUILT_IN_ISNORMAL:
2166 case BUILT_IN_ISFINITE:
2167 CASE_FLT_FN (BUILT_IN_FINITE):
2168 case BUILT_IN_FINITED32:
2169 case BUILT_IN_FINITED64:
2170 case BUILT_IN_FINITED128:
2171 case BUILT_IN_ISINFD32:
2172 case BUILT_IN_ISINFD64:
2173 case BUILT_IN_ISINFD128:
2174 /* These builtins have no optabs (yet). */
2175 break;
2176 default:
2177 gcc_unreachable ();
2180 /* There's no easy way to detect the case we need to set EDOM. */
2181 if (flag_errno_math && errno_set)
2182 return CODE_FOR_nothing;
2184 /* Optab mode depends on the mode of the input argument. */
2185 mode = TYPE_MODE (TREE_TYPE (arg));
2187 if (builtin_optab)
2188 return optab_handler (builtin_optab, mode);
2189 return CODE_FOR_nothing;
2192 /* Expand a call to one of the builtin math functions that operate on
2193 a floating point argument and output an integer result (ilogb, isinf,
2194 isnan, etc).
2195 Return 0 if a normal call should be emitted rather than expanding the
2196 function in-line. EXP is the expression that is a call to the builtin
2197 function; if convenient, the result should be placed in TARGET. */
2199 static rtx
2200 expand_builtin_interclass_mathfn (tree exp, rtx target)
2202 enum insn_code icode = CODE_FOR_nothing;
2203 rtx op0;
2204 tree fndecl = get_callee_fndecl (exp);
2205 machine_mode mode;
2206 tree arg;
2208 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2209 return NULL_RTX;
2211 arg = CALL_EXPR_ARG (exp, 0);
2212 icode = interclass_mathfn_icode (arg, fndecl);
2213 mode = TYPE_MODE (TREE_TYPE (arg));
2215 if (icode != CODE_FOR_nothing)
2217 struct expand_operand ops[1];
2218 rtx_insn *last = get_last_insn ();
2219 tree orig_arg = arg;
2221 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2222 need to expand the argument again. This way, we will not perform
2223 side-effects more than once. */
2224 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2226 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2228 if (mode != GET_MODE (op0))
2229 op0 = convert_to_mode (mode, op0, 0);
2231 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2232 if (maybe_legitimize_operands (icode, 0, 1, ops)
2233 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2234 return ops[0].value;
2236 delete_insns_since (last);
2237 CALL_EXPR_ARG (exp, 0) = orig_arg;
2240 return NULL_RTX;
2243 /* Expand a call to the builtin sincos math function.
2244 Return NULL_RTX if a normal call should be emitted rather than expanding the
2245 function in-line. EXP is the expression that is a call to the builtin
2246 function. */
2248 static rtx
2249 expand_builtin_sincos (tree exp)
2251 rtx op0, op1, op2, target1, target2;
2252 machine_mode mode;
2253 tree arg, sinp, cosp;
2254 int result;
2255 location_t loc = EXPR_LOCATION (exp);
2256 tree alias_type, alias_off;
2258 if (!validate_arglist (exp, REAL_TYPE,
2259 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2262 arg = CALL_EXPR_ARG (exp, 0);
2263 sinp = CALL_EXPR_ARG (exp, 1);
2264 cosp = CALL_EXPR_ARG (exp, 2);
2266 /* Make a suitable register to place result in. */
2267 mode = TYPE_MODE (TREE_TYPE (arg));
2269 /* Check if the sincos insn is available; otherwise emit the call. */
2270 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2271 return NULL_RTX;
2273 target1 = gen_reg_rtx (mode);
2274 target2 = gen_reg_rtx (mode);
2276 op0 = expand_normal (arg);
2277 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2278 alias_off = build_int_cst (alias_type, 0);
2279 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2280 sinp, alias_off));
2281 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2282 cosp, alias_off));
2284 /* Compute into target1 and target2.
2285 Set TARGET to wherever the result comes back. */
2286 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2287 gcc_assert (result);
2289 /* Move target1 and target2 to the memory locations indicated
2290 by op1 and op2. */
2291 emit_move_insn (op1, target1);
2292 emit_move_insn (op2, target2);
2294 return const0_rtx;
2297 /* Expand a call to the internal cexpi builtin to the sincos math function.
2298 EXP is the expression that is a call to the builtin function; if convenient,
2299 the result should be placed in TARGET. */
2301 static rtx
2302 expand_builtin_cexpi (tree exp, rtx target)
2304 tree fndecl = get_callee_fndecl (exp);
2305 tree arg, type;
2306 machine_mode mode;
2307 rtx op0, op1, op2;
2308 location_t loc = EXPR_LOCATION (exp);
2310 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2311 return NULL_RTX;
2313 arg = CALL_EXPR_ARG (exp, 0);
2314 type = TREE_TYPE (arg);
2315 mode = TYPE_MODE (TREE_TYPE (arg));
2317 /* Try expanding via a sincos optab, falling back to a libcall to
2318 sincos or cexp. We are sure we have sincos or cexp because cexpi
2319 is only generated when at least one of them is available. */
2320 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2322 op1 = gen_reg_rtx (mode);
2323 op2 = gen_reg_rtx (mode);
2325 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2327 /* Compute into op1 and op2. */
2328 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2330 else if (targetm.libc_has_function (function_sincos))
2332 tree call, fn = NULL_TREE;
2333 tree top1, top2;
2334 rtx op1a, op2a;
2336 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2337 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2338 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2339 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2340 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2341 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2342 else
2343 gcc_unreachable ();
2345 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2346 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op1a = copy_addr_to_reg (XEXP (op1, 0));
2348 op2a = copy_addr_to_reg (XEXP (op2, 0));
2349 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2350 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2352 /* Make sure not to fold the sincos call again. */
2353 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2354 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2355 call, 3, arg, top1, top2));
2357 else
2359 tree call, fn = NULL_TREE, narg;
2360 tree ctype = build_complex_type (type);
2362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2363 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2364 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2365 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2366 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2367 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2368 else
2369 gcc_unreachable ();
2371 /* If we don't have a decl for cexp, create one. This is the
2372 friendliest fallback if the user calls __builtin_cexpi
2373 on a target without full C99 function support. */
2374 if (fn == NULL_TREE)
2376 tree fntype;
2377 const char *name = NULL;
2379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2380 name = "cexpf";
2381 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2382 name = "cexp";
2383 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2384 name = "cexpl";
2386 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2387 fn = build_fn_decl (name, fntype);
2390 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2391 build_real (type, dconst0), arg);
2393 /* Make sure not to fold the cexp call again. */
2394 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2395 return expand_expr (build_call_nary (ctype, call, 1, narg),
2396 target, VOIDmode, EXPAND_NORMAL);
2399 /* Now build the proper return type. */
2400 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2401 make_tree (TREE_TYPE (arg), op2),
2402 make_tree (TREE_TYPE (arg), op1)),
2403 target, VOIDmode, EXPAND_NORMAL);
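/* Summary of the fallback chain above (illustrative): __builtin_cexpif (x)
   is expanded, in order of preference, as

       sincos optab                  hardware sin+cos in a single pattern
       sincosf (x, &sin, &cos)       if the C library provides sincos
       cexpf (0 + x*i)               otherwise, creating the decl if needed

   with the two parts reassembled into a COMPLEX_EXPR at the end.  */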
2406 /* Conveniently construct a function call expression. FNDECL names the
2407 function to be called, N is the number of arguments, and the "..."
2408 parameters are the argument expressions. Unlike build_call_expr,
2409 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2411 static tree
2412 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2414 va_list ap;
2415 tree fntype = TREE_TYPE (fndecl);
2416 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2418 va_start (ap, n);
2419 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2420 va_end (ap);
2421 SET_EXPR_LOCATION (fn, loc);
2422 return fn;
2425 /* Expand a call to one of the builtin rounding functions gcc defines
2426 as an extension (lfloor and lceil). As these are gcc extensions we
2427 do not need to worry about setting errno to EDOM.
2428 If expanding via optab fails, lower expression to (int)(floor(x)).
2429 EXP is the expression that is a call to the builtin function;
2430 if convenient, the result should be placed in TARGET. */
2432 static rtx
2433 expand_builtin_int_roundingfn (tree exp, rtx target)
2435 convert_optab builtin_optab;
2436 rtx op0, tmp;
2437 rtx_insn *insns;
2438 tree fndecl = get_callee_fndecl (exp);
2439 enum built_in_function fallback_fn;
2440 tree fallback_fndecl;
2441 machine_mode mode;
2442 tree arg;
2444 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2445 gcc_unreachable ();
2447 arg = CALL_EXPR_ARG (exp, 0);
2449 switch (DECL_FUNCTION_CODE (fndecl))
2451 CASE_FLT_FN (BUILT_IN_ICEIL):
2452 CASE_FLT_FN (BUILT_IN_LCEIL):
2453 CASE_FLT_FN (BUILT_IN_LLCEIL):
2454 builtin_optab = lceil_optab;
2455 fallback_fn = BUILT_IN_CEIL;
2456 break;
2458 CASE_FLT_FN (BUILT_IN_IFLOOR):
2459 CASE_FLT_FN (BUILT_IN_LFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2461 builtin_optab = lfloor_optab;
2462 fallback_fn = BUILT_IN_FLOOR;
2463 break;
2465 default:
2466 gcc_unreachable ();
2469 /* Make a suitable register to place result in. */
2470 mode = TYPE_MODE (TREE_TYPE (exp));
2472 target = gen_reg_rtx (mode);
2474 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2475 need to expand the argument again. This way, we will not perform
2476 side-effects more than once. */
2477 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2479 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2481 start_sequence ();
2483 /* Compute into TARGET. */
2484 if (expand_sfix_optab (target, op0, builtin_optab))
2486 /* Output the entire sequence. */
2487 insns = get_insns ();
2488 end_sequence ();
2489 emit_insn (insns);
2490 return target;
2493 /* If we were unable to expand via the builtin, stop the sequence
2494 (without outputting the insns). */
2495 end_sequence ();
2497 /* Fall back to floating point rounding optab. */
2498 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2500 /* For non-C99 targets we may end up without a fallback fndecl here
2501 if the user called __builtin_lfloor directly. In this case emit
2502 a call to the floor/ceil variants nevertheless. This should result
2503 in the best user experience on targets without full C99 support. */
2504 if (fallback_fndecl == NULL_TREE)
2506 tree fntype;
2507 const char *name = NULL;
2509 switch (DECL_FUNCTION_CODE (fndecl))
2511 case BUILT_IN_ICEIL:
2512 case BUILT_IN_LCEIL:
2513 case BUILT_IN_LLCEIL:
2514 name = "ceil";
2515 break;
2516 case BUILT_IN_ICEILF:
2517 case BUILT_IN_LCEILF:
2518 case BUILT_IN_LLCEILF:
2519 name = "ceilf";
2520 break;
2521 case BUILT_IN_ICEILL:
2522 case BUILT_IN_LCEILL:
2523 case BUILT_IN_LLCEILL:
2524 name = "ceill";
2525 break;
2526 case BUILT_IN_IFLOOR:
2527 case BUILT_IN_LFLOOR:
2528 case BUILT_IN_LLFLOOR:
2529 name = "floor";
2530 break;
2531 case BUILT_IN_IFLOORF:
2532 case BUILT_IN_LFLOORF:
2533 case BUILT_IN_LLFLOORF:
2534 name = "floorf";
2535 break;
2536 case BUILT_IN_IFLOORL:
2537 case BUILT_IN_LFLOORL:
2538 case BUILT_IN_LLFLOORL:
2539 name = "floorl";
2540 break;
2541 default:
2542 gcc_unreachable ();
2545 fntype = build_function_type_list (TREE_TYPE (arg),
2546 TREE_TYPE (arg), NULL_TREE);
2547 fallback_fndecl = build_fn_decl (name, fntype);
2550 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2552 tmp = expand_normal (exp);
2553 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2555 /* Truncate the result of floating point optab to integer
2556 via expand_fix (). */
2557 target = gen_reg_rtx (mode);
2558 expand_fix (target, tmp, 0);
2560 return target;
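/* Worked example (illustrative): on a target without the lfloor optab,
   __builtin_lfloorf (x) is expanded by the fallback path above as if the
   user had written

       (long) floorf (x);

   the floorf call comes from fallback_fndecl and expand_fix performs the
   final float-to-integer conversion.  */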
2563 /* Expand a call to one of the builtin math functions doing integer
2564 conversion (lrint).
2565 Return 0 if a normal call should be emitted rather than expanding the
2566 function in-line. EXP is the expression that is a call to the builtin
2567 function; if convenient, the result should be placed in TARGET. */
2569 static rtx
2570 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2572 convert_optab builtin_optab;
2573 rtx op0;
2574 rtx_insn *insns;
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg;
2577 machine_mode mode;
2578 enum built_in_function fallback_fn = BUILT_IN_NONE;
2580 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2581 gcc_unreachable ();
2583 arg = CALL_EXPR_ARG (exp, 0);
2585 switch (DECL_FUNCTION_CODE (fndecl))
2587 CASE_FLT_FN (BUILT_IN_IRINT):
2588 fallback_fn = BUILT_IN_LRINT;
2589 /* FALLTHRU */
2590 CASE_FLT_FN (BUILT_IN_LRINT):
2591 CASE_FLT_FN (BUILT_IN_LLRINT):
2592 builtin_optab = lrint_optab;
2593 break;
2595 CASE_FLT_FN (BUILT_IN_IROUND):
2596 fallback_fn = BUILT_IN_LROUND;
2597 /* FALLTHRU */
2598 CASE_FLT_FN (BUILT_IN_LROUND):
2599 CASE_FLT_FN (BUILT_IN_LLROUND):
2600 builtin_optab = lround_optab;
2601 break;
2603 default:
2604 gcc_unreachable ();
2607 /* There's no easy way to detect the case we need to set EDOM. */
2608 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2609 return NULL_RTX;
2611 /* Make a suitable register to place result in. */
2612 mode = TYPE_MODE (TREE_TYPE (exp));
2614 /* There's no easy way to detect the case we need to set EDOM. */
2615 if (!flag_errno_math)
2617 rtx result = gen_reg_rtx (mode);
2619 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2620 need to expand the argument again. This way, we will not perform
2621 side-effects more than once. */
2622 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2624 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2626 start_sequence ();
2628 if (expand_sfix_optab (result, op0, builtin_optab))
2630 /* Output the entire sequence. */
2631 insns = get_insns ();
2632 end_sequence ();
2633 emit_insn (insns);
2634 return result;
2637 /* If we were unable to expand via the builtin, stop the sequence
2638 (without outputting the insns) and call the library function
2639 with the stabilized argument list. */
2640 end_sequence ();
2643 if (fallback_fn != BUILT_IN_NONE)
2645 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2646 targets, (int) round (x) should never be transformed into
2647 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2648 a call to lround in the hope that the target provides at least some
2649 C99 functions. This should result in the best user experience
2650 on targets without full C99 support. */
2651 tree fallback_fndecl = mathfn_built_in_1
2652 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2654 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2655 fallback_fndecl, 1, arg);
2657 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2658 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2659 return convert_to_mode (mode, target, 0);
2662 return expand_call (exp, target, target == const0_rtx);
2665 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2666 a normal call should be emitted rather than expanding the function
2667 in-line. EXP is the expression that is a call to the builtin
2668 function; if convenient, the result should be placed in TARGET. */
2670 static rtx
2671 expand_builtin_powi (tree exp, rtx target)
2673 tree arg0, arg1;
2674 rtx op0, op1;
2675 machine_mode mode;
2676 machine_mode mode2;
2678 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2679 return NULL_RTX;
2681 arg0 = CALL_EXPR_ARG (exp, 0);
2682 arg1 = CALL_EXPR_ARG (exp, 1);
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2685 /* Emit a libcall to libgcc. */
2687 /* Mode of the 2nd argument must match that of an int. */
2688 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2690 if (target == NULL_RTX)
2691 target = gen_reg_rtx (mode);
2693 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2694 if (GET_MODE (op0) != mode)
2695 op0 = convert_to_mode (mode, op0, 0);
2696 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2697 if (GET_MODE (op1) != mode2)
2698 op1 = convert_to_mode (mode2, op1, 0);
2700 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2701 target, LCT_CONST, mode, 2,
2702 op0, mode, op1, mode2);
2704 return target;
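/* E.g. (illustrative): for a double argument this emits a call to the
   libgcc routine registered for powi_optab in DFmode (conventionally
   __powidf2), passing X in DFmode and N in the int-sized mode computed
   above.  Small constant exponents are normally strength-reduced to
   multiplies earlier, at the tree level, so this path is the general
   runtime-exponent case.  */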
2707 /* Expand expression EXP, which is a call to the strlen builtin. Return
2708 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2709 try to get the result in TARGET, if convenient. */
2711 static rtx
2712 expand_builtin_strlen (tree exp, rtx target,
2713 machine_mode target_mode)
2715 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2716 return NULL_RTX;
2717 else
2719 struct expand_operand ops[4];
2720 rtx pat;
2721 tree len;
2722 tree src = CALL_EXPR_ARG (exp, 0);
2723 rtx src_reg;
2724 rtx_insn *before_strlen;
2725 machine_mode insn_mode = target_mode;
2726 enum insn_code icode = CODE_FOR_nothing;
2727 unsigned int align;
2729 /* If the length can be computed at compile-time, return it. */
2730 len = c_strlen (src, 0);
2731 if (len)
2732 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2734 /* If the length can be computed at compile-time and is a constant
2735 integer, but there are side-effects in src, evaluate
2736 src for side-effects, then return len.
2737 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2738 can be optimized into: i++; x = 3; */
2739 len = c_strlen (src, 1);
2740 if (len && TREE_CODE (len) == INTEGER_CST)
2742 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2743 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2746 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2748 /* If SRC is not a pointer type, don't do this operation inline. */
2749 if (align == 0)
2750 return NULL_RTX;
2752 /* Bail out if we can't compute strlen in the right mode. */
2753 while (insn_mode != VOIDmode)
2755 icode = optab_handler (strlen_optab, insn_mode);
2756 if (icode != CODE_FOR_nothing)
2757 break;
2759 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2761 if (insn_mode == VOIDmode)
2762 return NULL_RTX;
2764 /* Make a place to hold the source address. We will not expand
2765 the actual source until we are sure that the expansion will
2766 not fail -- there are trees that cannot be expanded twice. */
2767 src_reg = gen_reg_rtx (Pmode);
2769 /* Mark the beginning of the strlen sequence so we can emit the
2770 source operand later. */
2771 before_strlen = get_last_insn ();
2773 create_output_operand (&ops[0], target, insn_mode);
2774 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2775 create_integer_operand (&ops[2], 0);
2776 create_integer_operand (&ops[3], align);
2777 if (!maybe_expand_insn (icode, 4, ops))
2778 return NULL_RTX;
2780 /* Now that we are assured of success, expand the source. */
2781 start_sequence ();
2782 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2783 if (pat != src_reg)
2785 #ifdef POINTERS_EXTEND_UNSIGNED
2786 if (GET_MODE (pat) != Pmode)
2787 pat = convert_to_mode (Pmode, pat,
2788 POINTERS_EXTEND_UNSIGNED);
2789 #endif
2790 emit_move_insn (src_reg, pat);
2792 pat = get_insns ();
2793 end_sequence ();
2795 if (before_strlen)
2796 emit_insn_after (pat, before_strlen);
2797 else
2798 emit_insn_before (pat, get_insns ());
2800 /* Return the value in the proper mode for this function. */
2801 if (GET_MODE (ops[0].value) == target_mode)
2802 target = ops[0].value;
2803 else if (target != 0)
2804 convert_move (target, ops[0].value, 0);
2805 else
2806 target = convert_to_mode (target_mode, ops[0].value, 0);
2808 return target;
2812 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2813 bytes from constant string DATA + OFFSET and return it as target
2814 constant. */
2816 static rtx
2817 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2818 machine_mode mode)
2820 const char *str = (const char *) data;
2822 gcc_assert (offset >= 0
2823 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2824 <= strlen (str) + 1));
2826 return c_readstr (str + offset, mode);
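/* Worked example (illustrative): with DATA = "hello", OFFSET = 1 and a
   4-byte MODE, the assertion allows offsets up to strlen + 1 so the
   terminating NUL may be included, and c_readstr packs the bytes
   'e' 'l' 'l' 'o' into a MODE-sized constant in target byte order.  */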
2829 /* LEN specifies the length of the block for a memcpy/memset operation.
2830 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2831 In some cases we can make a very likely guess at the maximum size,
2832 which we then record in PROBABLE_MAX_SIZE. */
2834 static void
2835 determine_block_size (tree len, rtx len_rtx,
2836 unsigned HOST_WIDE_INT *min_size,
2837 unsigned HOST_WIDE_INT *max_size,
2838 unsigned HOST_WIDE_INT *probable_max_size)
2840 if (CONST_INT_P (len_rtx))
2842 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2843 return;
2845 else
2847 wide_int min, max;
2848 enum value_range_type range_type = VR_UNDEFINED;
2850 /* Determine bounds from the type. */
2851 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2852 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2853 else
2854 *min_size = 0;
2855 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2856 *probable_max_size = *max_size
2857 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2858 else
2859 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2861 if (TREE_CODE (len) == SSA_NAME)
2862 range_type = get_range_info (len, &min, &max);
2863 if (range_type == VR_RANGE)
2865 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2866 *min_size = min.to_uhwi ();
2867 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2868 *probable_max_size = *max_size = max.to_uhwi ();
2870 else if (range_type == VR_ANTI_RANGE)
2872 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2873 if (min == 0)
2875 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2876 *min_size = max.to_uhwi () + 1;
2878 /* Code like
2880 int n;
2881 if (n < 100)
2882 memcpy (a, b, n)
2884 produces an anti-range allowing negative values of N. We can
2885 still use that information and guess that N is not negative.
2887 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2888 *probable_max_size = min.to_uhwi () - 1;
2891 gcc_checking_assert (*max_size <=
2892 (unsigned HOST_WIDE_INT)
2893 GET_MODE_MASK (GET_MODE (len_rtx)));
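/* Worked example (illustrative): given

       unsigned int n = ...;       with VR_RANGE [16, 64] from get_range_info
       memcpy (dst, src, n);

   this computes *min_size == 16 and *max_size == *probable_max_size == 64,
   letting the block-move expander choose a strategy that is valid for
   every length in that range.  */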
2896 /* Helper function to do the actual work for expand_builtin_memcpy. */
2898 static rtx
2899 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2901 const char *src_str;
2902 unsigned int src_align = get_pointer_alignment (src);
2903 unsigned int dest_align = get_pointer_alignment (dest);
2904 rtx dest_mem, src_mem, dest_addr, len_rtx;
2905 HOST_WIDE_INT expected_size = -1;
2906 unsigned int expected_align = 0;
2907 unsigned HOST_WIDE_INT min_size;
2908 unsigned HOST_WIDE_INT max_size;
2909 unsigned HOST_WIDE_INT probable_max_size;
2911 /* If DEST is not a pointer type, call the normal function. */
2912 if (dest_align == 0)
2913 return NULL_RTX;
2915 /* If SRC is not a pointer type, don't do this
2916 operation in-line. */
2917 if (src_align == 0)
2918 return NULL_RTX;
2920 if (currently_expanding_gimple_stmt)
2921 stringop_block_profile (currently_expanding_gimple_stmt,
2922 &expected_align, &expected_size);
2924 if (expected_align < dest_align)
2925 expected_align = dest_align;
2926 dest_mem = get_memory_rtx (dest, len);
2927 set_mem_align (dest_mem, dest_align);
2928 len_rtx = expand_normal (len);
2929 determine_block_size (len, len_rtx, &min_size, &max_size,
2930 &probable_max_size);
2931 src_str = c_getstr (src);
2933 /* If SRC is a string constant and block move would be done
2934 by pieces, we can avoid loading the string from memory
2935 and only store the computed constants. */
2936 if (src_str
2937 && CONST_INT_P (len_rtx)
2938 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2939 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2940 CONST_CAST (char *, src_str),
2941 dest_align, false))
2943 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2944 builtin_memcpy_read_str,
2945 CONST_CAST (char *, src_str),
2946 dest_align, false, 0);
2947 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2948 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2949 return dest_mem;
2952 src_mem = get_memory_rtx (src, len);
2953 set_mem_align (src_mem, src_align);
2955 /* Copy word part most expediently. */
2956 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2957 CALL_EXPR_TAILCALL (exp)
2958 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2959 expected_align, expected_size,
2960 min_size, max_size, probable_max_size);
2962 if (dest_addr == 0)
2964 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2965 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2968 return dest_addr;
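/* E.g. (illustrative): for memcpy (buf, "abc", 4) with suitably aligned BUF,
   the store_by_pieces path above wins -- all four bytes (including the
   terminating NUL) are emitted as immediate stores and the string constant
   is never loaded from memory.  */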
2971 /* Expand a call EXP to the memcpy builtin.
2972 Return NULL_RTX if we failed; the caller should emit a normal call;
2973 otherwise try to get the result in TARGET, if convenient (and in
2974 mode MODE if that's convenient). */
2976 static rtx
2977 expand_builtin_memcpy (tree exp, rtx target)
2979 if (!validate_arglist (exp,
2980 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2981 return NULL_RTX;
2982 else
2984 tree dest = CALL_EXPR_ARG (exp, 0);
2985 tree src = CALL_EXPR_ARG (exp, 1);
2986 tree len = CALL_EXPR_ARG (exp, 2);
2987 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2991 /* Expand an instrumented call EXP to the memcpy builtin.
2992 Return NULL_RTX if we failed; the caller should emit a normal call;
2993 otherwise try to get the result in TARGET, if convenient (and in
2994 mode MODE if that's convenient). */
2996 static rtx
2997 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2999 if (!validate_arglist (exp,
3000 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 INTEGER_TYPE, VOID_TYPE))
3003 return NULL_RTX;
3004 else
3006 tree dest = CALL_EXPR_ARG (exp, 0);
3007 tree src = CALL_EXPR_ARG (exp, 2);
3008 tree len = CALL_EXPR_ARG (exp, 4);
3009 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3011 /* Return src bounds with the result. */
3012 if (res)
3014 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3015 expand_normal (CALL_EXPR_ARG (exp, 1)));
3016 res = chkp_join_splitted_slot (res, bnd);
3018 return res;
3022 /* Expand a call EXP to the mempcpy builtin.
3023 Return NULL_RTX if we failed; the caller should emit a normal call,
3024 otherwise try to get the result in TARGET, if convenient (and in
3025 mode MODE if that's convenient). If ENDP is 0 return the
3026 destination pointer, if ENDP is 1 return the end pointer ala
3027 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3028 stpcpy. */
3030 static rtx
3031 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3033 if (!validate_arglist (exp,
3034 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3035 return NULL_RTX;
3036 else
3038 tree dest = CALL_EXPR_ARG (exp, 0);
3039 tree src = CALL_EXPR_ARG (exp, 1);
3040 tree len = CALL_EXPR_ARG (exp, 2);
3041 return expand_builtin_mempcpy_args (dest, src, len,
3042 target, mode, /*endp=*/ 1,
3043 exp);
3047 /* Expand an instrumented call EXP to the mempcpy builtin.
3048 Return NULL_RTX if we failed; the caller should emit a normal call;
3049 otherwise try to get the result in TARGET, if convenient (and in
3050 mode MODE if that's convenient). */
3052 static rtx
3053 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3055 if (!validate_arglist (exp,
3056 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 INTEGER_TYPE, VOID_TYPE))
3059 return NULL_RTX;
3060 else
3062 tree dest = CALL_EXPR_ARG (exp, 0);
3063 tree src = CALL_EXPR_ARG (exp, 2);
3064 tree len = CALL_EXPR_ARG (exp, 4);
3065 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3066 mode, 1, exp);
3068 /* Return src bounds with the result. */
3069 if (res)
3071 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3072 expand_normal (CALL_EXPR_ARG (exp, 1)));
3073 res = chkp_join_splitted_slot (res, bnd);
3075 return res;
3079 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3080 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3081 so that this can also be called without constructing an actual CALL_EXPR.
3082 The other arguments and return value are the same as for
3083 expand_builtin_mempcpy. */
3085 static rtx
3086 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3087 rtx target, machine_mode mode, int endp,
3088 tree orig_exp)
3090 tree fndecl = get_callee_fndecl (orig_exp);
3092 /* If return value is ignored, transform mempcpy into memcpy. */
3093 if (target == const0_rtx
3094 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3095 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3097 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3098 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3099 dest, src, len);
3100 return expand_expr (result, target, mode, EXPAND_NORMAL);
3102 else if (target == const0_rtx
3103 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3105 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3106 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3107 dest, src, len);
3108 return expand_expr (result, target, mode, EXPAND_NORMAL);
3110 else
3112 const char *src_str;
3113 unsigned int src_align = get_pointer_alignment (src);
3114 unsigned int dest_align = get_pointer_alignment (dest);
3115 rtx dest_mem, src_mem, len_rtx;
3117 /* If either SRC or DEST is not a pointer type, don't do this
3118 operation in-line. */
3119 if (dest_align == 0 || src_align == 0)
3120 return NULL_RTX;
3122 /* If LEN is not constant, call the normal function. */
3123 if (! tree_fits_uhwi_p (len))
3124 return NULL_RTX;
3126 len_rtx = expand_normal (len);
3127 src_str = c_getstr (src);
3129 /* If SRC is a string constant and block move would be done
3130 by pieces, we can avoid loading the string from memory
3131 and only store the computed constants. */
3132 if (src_str
3133 && CONST_INT_P (len_rtx)
3134 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3135 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3136 CONST_CAST (char *, src_str),
3137 dest_align, false))
3139 dest_mem = get_memory_rtx (dest, len);
3140 set_mem_align (dest_mem, dest_align);
3141 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3142 builtin_memcpy_read_str,
3143 CONST_CAST (char *, src_str),
3144 dest_align, false, endp);
3145 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3146 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3147 return dest_mem;
3150 if (CONST_INT_P (len_rtx)
3151 && can_move_by_pieces (INTVAL (len_rtx),
3152 MIN (dest_align, src_align)))
3154 dest_mem = get_memory_rtx (dest, len);
3155 set_mem_align (dest_mem, dest_align);
3156 src_mem = get_memory_rtx (src, len);
3157 set_mem_align (src_mem, src_align);
3158 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3159 MIN (dest_align, src_align), endp);
3160 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3161 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3162 return dest_mem;
3165 return NULL_RTX;
3169 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3170 we failed; the caller should emit a normal call. Otherwise try to
3171 get the result in TARGET, if convenient. If ENDP is 0 return the
3172 destination pointer, if ENDP is 1 return the end pointer ala
3173 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3174 stpcpy. */
3176 static rtx
3177 expand_movstr (tree dest, tree src, rtx target, int endp)
3179 struct expand_operand ops[3];
3180 rtx dest_mem;
3181 rtx src_mem;
3183 if (!targetm.have_movstr ())
3184 return NULL_RTX;
3186 dest_mem = get_memory_rtx (dest, NULL);
3187 src_mem = get_memory_rtx (src, NULL);
3188 if (!endp)
3190 target = force_reg (Pmode, XEXP (dest_mem, 0));
3191 dest_mem = replace_equiv_address (dest_mem, target);
3194 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3195 create_fixed_operand (&ops[1], dest_mem);
3196 create_fixed_operand (&ops[2], src_mem);
3197 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3198 return NULL_RTX;
3200 if (endp && target != const0_rtx)
3202 target = ops[0].value;
3203 /* movstr is supposed to set end to the address of the NUL
3204 terminator. If the caller requested a mempcpy-like return value,
3205 adjust it. */
3206 if (endp == 1)
3208 rtx tem = plus_constant (GET_MODE (target),
3209 gen_lowpart (GET_MODE (target), target), 1);
3210 emit_move_insn (target, force_operand (tem, NULL_RTX));
3213 return target;
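/* ENDP recap (illustrative), for a copy of "ab" into DST:
     endp == 0  returns DST             (strcpy-style)
     endp == 1  returns DST + 3         (mempcpy: one past the copied NUL)
     endp == 2  returns DST + 2         (stpcpy: the address of the NUL)
   movstr itself yields the NUL's address, hence the +1 adjustment above
   for the endp == 1 case.  */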
3216 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3217 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3218 try to get the result in TARGET, if convenient (and in mode MODE if that's
3219 convenient). */
3221 static rtx
3222 expand_builtin_strcpy (tree exp, rtx target)
3224 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3226 tree dest = CALL_EXPR_ARG (exp, 0);
3227 tree src = CALL_EXPR_ARG (exp, 1);
3228 return expand_builtin_strcpy_args (dest, src, target);
3230 return NULL_RTX;
3233 /* Helper function to do the actual work for expand_builtin_strcpy. The
3234 arguments to the builtin_strcpy call DEST and SRC are broken out
3235 so that this can also be called without constructing an actual CALL_EXPR.
3236 The other arguments and return value are the same as for
3237 expand_builtin_strcpy. */
3239 static rtx
3240 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3242 return expand_movstr (dest, src, target, /*endp=*/0);
3245 /* Expand a call EXP to the stpcpy builtin.
3246 Return NULL_RTX if we failed; the caller should emit a normal call;
3247 otherwise try to get the result in TARGET, if convenient (and in
3248 mode MODE if that's convenient). */
3250 static rtx
3251 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3253 tree dst, src;
3254 location_t loc = EXPR_LOCATION (exp);
3256 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3257 return NULL_RTX;
3259 dst = CALL_EXPR_ARG (exp, 0);
3260 src = CALL_EXPR_ARG (exp, 1);
3262 /* If return value is ignored, transform stpcpy into strcpy. */
3263 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3265 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3266 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3267 return expand_expr (result, target, mode, EXPAND_NORMAL);
3269 else
3271 tree len, lenp1;
3272 rtx ret;
3274 /* Ensure we get an actual string whose length can be evaluated at
3275 compile-time, not an expression containing a string. This is
3276 because the latter will potentially produce pessimized code
3277 when used to produce the return value. */
3278 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3279 return expand_movstr (dst, src, target, /*endp=*/2);
3281 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3282 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3283 target, mode, /*endp=*/2,
3284 exp);
3286 if (ret)
3287 return ret;
3289 if (TREE_CODE (len) == INTEGER_CST)
3291 rtx len_rtx = expand_normal (len);
3293 if (CONST_INT_P (len_rtx))
3295 ret = expand_builtin_strcpy_args (dst, src, target);
3297 if (ret)
3299 if (! target)
3301 if (mode != VOIDmode)
3302 target = gen_reg_rtx (mode);
3303 else
3304 target = gen_reg_rtx (GET_MODE (ret));
3306 if (GET_MODE (target) != GET_MODE (ret))
3307 ret = gen_lowpart (GET_MODE (target), ret);
3309 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3310 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3311 gcc_assert (ret);
3313 return target;
3318 return expand_movstr (dst, src, target, /*endp=*/2);
3322 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3323 bytes from constant string DATA + OFFSET and return it as target
3324 constant. */
3326 static rtx
3327 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3328 machine_mode mode)
3330 const char *str = (const char *) data;
3332 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3333 return const0_rtx;
3335 return c_readstr (str + offset, mode);
3338 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3339 NULL_RTX if we failed; the caller should emit a normal call. */
3341 static rtx
3342 expand_builtin_strncpy (tree exp, rtx target)
3344 location_t loc = EXPR_LOCATION (exp);
3346 if (validate_arglist (exp,
3347 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3349 tree dest = CALL_EXPR_ARG (exp, 0);
3350 tree src = CALL_EXPR_ARG (exp, 1);
3351 tree len = CALL_EXPR_ARG (exp, 2);
3352 tree slen = c_strlen (src, 1);
3354 /* We must be passed constant LEN and SRC parameters. */
3355 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3356 return NULL_RTX;
3358 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3360 /* We're required to pad with trailing zeros if the requested
3361 len is greater than strlen(s2)+1. In that case try to
3362 use store_by_pieces; if it fails, punt. */
3363 if (tree_int_cst_lt (slen, len))
3365 unsigned int dest_align = get_pointer_alignment (dest);
3366 const char *p = c_getstr (src);
3367 rtx dest_mem;
3369 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3370 || !can_store_by_pieces (tree_to_uhwi (len),
3371 builtin_strncpy_read_str,
3372 CONST_CAST (char *, p),
3373 dest_align, false))
3374 return NULL_RTX;
3376 dest_mem = get_memory_rtx (dest, len);
3377 store_by_pieces (dest_mem, tree_to_uhwi (len),
3378 builtin_strncpy_read_str,
3379 CONST_CAST (char *, p), dest_align, false, 0);
3380 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3381 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3382 return dest_mem;
3385 return NULL_RTX;
3388 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3389 bytes from constant string DATA + OFFSET and return it as target
3390 constant. */
3392 static rtx
3393 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3394 machine_mode mode)
3396 const char *c = (const char *) data;
3397 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3399 memset (p, *c, GET_MODE_SIZE (mode));
3401 return c_readstr (p, mode);
3404 /* Callback routine for store_by_pieces. Return the RTL of a register
3405 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3406 char value given in the RTL register data. For example, if mode is
3407 4 bytes wide, return the RTL for 0x01010101*data. */
3409 static rtx
3410 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3411 machine_mode mode)
3413 rtx target, coeff;
3414 size_t size;
3415 char *p;
3417 size = GET_MODE_SIZE (mode);
3418 if (size == 1)
3419 return (rtx) data;
3421 p = XALLOCAVEC (char, size);
3422 memset (p, 1, size);
3423 coeff = c_readstr (p, mode);
3425 target = convert_to_mode (mode, (rtx) data, 1);
3426 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3427 return force_reg (mode, target);
3430 /* Expand expression EXP, which is a call to the memset builtin. Return
3431 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3432 try to get the result in TARGET, if convenient (and in mode MODE if that's
3433 convenient). */
3435 static rtx
3436 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3438 if (!validate_arglist (exp,
3439 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3440 return NULL_RTX;
3441 else
3443 tree dest = CALL_EXPR_ARG (exp, 0);
3444 tree val = CALL_EXPR_ARG (exp, 1);
3445 tree len = CALL_EXPR_ARG (exp, 2);
3446 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3450 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3451 Return NULL_RTX if we failed; the caller should emit a normal call; otherwise
3452 try to get the result in TARGET, if convenient (and in mode MODE if that's
3453 convenient). */
3455 static rtx
3456 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3458 if (!validate_arglist (exp,
3459 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3460 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462 else
3464 tree dest = CALL_EXPR_ARG (exp, 0);
3465 tree val = CALL_EXPR_ARG (exp, 2);
3466 tree len = CALL_EXPR_ARG (exp, 3);
3467 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3469 /* Return src bounds with the result. */
3470 if (res)
3472 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3473 expand_normal (CALL_EXPR_ARG (exp, 1)));
3474 res = chkp_join_splitted_slot (res, bnd);
3476 return res;
3480 /* Helper function to do the actual work for expand_builtin_memset. The
3481 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3482 so that this can also be called without constructing an actual CALL_EXPR.
3483 The other arguments and return value are the same as for
3484 expand_builtin_memset. */
3486 static rtx
3487 expand_builtin_memset_args (tree dest, tree val, tree len,
3488 rtx target, machine_mode mode, tree orig_exp)
3490 tree fndecl, fn;
3491 enum built_in_function fcode;
3492 machine_mode val_mode;
3493 char c;
3494 unsigned int dest_align;
3495 rtx dest_mem, dest_addr, len_rtx;
3496 HOST_WIDE_INT expected_size = -1;
3497 unsigned int expected_align = 0;
3498 unsigned HOST_WIDE_INT min_size;
3499 unsigned HOST_WIDE_INT max_size;
3500 unsigned HOST_WIDE_INT probable_max_size;
3502 dest_align = get_pointer_alignment (dest);
3504 /* If DEST is not a pointer type, don't do this operation in-line. */
3505 if (dest_align == 0)
3506 return NULL_RTX;
3508 if (currently_expanding_gimple_stmt)
3509 stringop_block_profile (currently_expanding_gimple_stmt,
3510 &expected_align, &expected_size);
3512 if (expected_align < dest_align)
3513 expected_align = dest_align;
3515 /* If the LEN parameter is zero, return DEST. */
3516 if (integer_zerop (len))
3518 /* Evaluate and ignore VAL in case it has side-effects. */
3519 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3520 return expand_expr (dest, target, mode, EXPAND_NORMAL);
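/* E.g. __builtin_memset (p, f (), 0) must still call f for its side
   effects even though no bytes are stored; the expansion above does
   exactly that and then returns P.  */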
3523 /* Stabilize the arguments in case we fail. */
3524 dest = builtin_save_expr (dest);
3525 val = builtin_save_expr (val);
3526 len = builtin_save_expr (len);
3528 len_rtx = expand_normal (len);
3529 determine_block_size (len, len_rtx, &min_size, &max_size,
3530 &probable_max_size);
3531 dest_mem = get_memory_rtx (dest, len);
3532 val_mode = TYPE_MODE (unsigned_char_type_node);
3534 if (TREE_CODE (val) != INTEGER_CST)
3536 rtx val_rtx;
3538 val_rtx = expand_normal (val);
3539 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3541 /* Assume that we can memset by pieces if we can store
3542 the coefficients by pieces (in the required modes).
3543 We can't pass builtin_memset_gen_str as that emits RTL. */
3544 c = 1;
3545 if (tree_fits_uhwi_p (len)
3546 && can_store_by_pieces (tree_to_uhwi (len),
3547 builtin_memset_read_str, &c, dest_align,
3548 true))
3550 val_rtx = force_reg (val_mode, val_rtx);
3551 store_by_pieces (dest_mem, tree_to_uhwi (len),
3552 builtin_memset_gen_str, val_rtx, dest_align,
3553 true, 0);
3555 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3556 dest_align, expected_align,
3557 expected_size, min_size, max_size,
3558 probable_max_size))
3559 goto do_libcall;
3561 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3562 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3563 return dest_mem;
3566 if (target_char_cast (val, &c))
3567 goto do_libcall;
3569 if (c)
3571 if (tree_fits_uhwi_p (len)
3572 && can_store_by_pieces (tree_to_uhwi (len),
3573 builtin_memset_read_str, &c, dest_align,
3574 true))
3575 store_by_pieces (dest_mem, tree_to_uhwi (len),
3576 builtin_memset_read_str, &c, dest_align, true, 0);
3577 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3578 gen_int_mode (c, val_mode),
3579 dest_align, expected_align,
3580 expected_size, min_size, max_size,
3581 probable_max_size))
3582 goto do_libcall;
3584 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3585 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3586 return dest_mem;
3589 set_mem_align (dest_mem, dest_align);
3590 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3591 CALL_EXPR_TAILCALL (orig_exp)
3592 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3593 expected_align, expected_size,
3594 min_size, max_size,
3595 probable_max_size);
3597 if (dest_addr == 0)
3599 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3600 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3603 return dest_addr;
3605 do_libcall:
3606 fndecl = get_callee_fndecl (orig_exp);
3607 fcode = DECL_FUNCTION_CODE (fndecl);
3608 if (fcode == BUILT_IN_MEMSET
3609 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3611 dest, val, len);
3612 else if (fcode == BUILT_IN_BZERO)
3613 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3614 dest, len);
3615 else
3616 gcc_unreachable ();
3617 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3618 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3619 return expand_call (fn, target, target == const0_rtx);
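/* To illustrate the cases above (a sketch only; the actual choice is
   target- and cost-dependent):

     memset (p, v, 16), v non-constant: store_by_pieces of v*0x01...01
     memset (p, 5, 16): store_by_pieces of the constant byte pattern
     memset (p, 0, n): clear_storage_hints, i.e. a setmem pattern or,
       failing that, the libcall emitted at do_libcall above.  */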
3622 /* Expand expression EXP, which is a call to the bzero builtin. Return
3623 NULL_RTX if we failed; the caller should emit a normal call. */
3625 static rtx
3626 expand_builtin_bzero (tree exp)
3628 tree dest, size;
3629 location_t loc = EXPR_LOCATION (exp);
3631 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3632 return NULL_RTX;
3634 dest = CALL_EXPR_ARG (exp, 0);
3635 size = CALL_EXPR_ARG (exp, 1);
3637 /* New argument list transforming bzero(ptr x, int y) to
3638 memset(ptr x, int 0, size_t y). This is done this way
3639 so that if it isn't expanded inline, we fall back to
3640 calling bzero instead of memset. */
3642 return expand_builtin_memset_args (dest, integer_zero_node,
3643 fold_convert_loc (loc,
3644 size_type_node, size),
3645 const0_rtx, VOIDmode, exp);
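/* Hence bzero (p, n) is expanded exactly as
   __builtin_memset (p, 0, (size_t) n) would be, except that a failed
   expansion falls back to the bzero library routine itself.  */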
3648 /* Try to expand cmpstr operation ICODE with the given operands.
3649 Return the result rtx on success, otherwise return null. */
3651 static rtx
3652 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3653 HOST_WIDE_INT align)
3655 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3657 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3658 target = NULL_RTX;
3660 struct expand_operand ops[4];
3661 create_output_operand (&ops[0], target, insn_mode);
3662 create_fixed_operand (&ops[1], arg1_rtx);
3663 create_fixed_operand (&ops[2], arg2_rtx);
3664 create_integer_operand (&ops[3], align);
3665 if (maybe_expand_insn (icode, 4, ops))
3666 return ops[0].value;
3667 return NULL_RTX;
3670 /* Expand expression EXP, which is a call to the memcmp built-in function.
3671 Return NULL_RTX if we failed and the caller should emit a normal call,
3672 otherwise try to get the result in TARGET, if convenient.
3673 RESULT_EQ is true if we can relax the returned value to be either zero
3674 or nonzero, without caring about the sign. */
3676 static rtx
3677 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3679 if (!validate_arglist (exp,
3680 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3681 return NULL_RTX;
3683 tree arg1 = CALL_EXPR_ARG (exp, 0);
3684 tree arg2 = CALL_EXPR_ARG (exp, 1);
3685 tree len = CALL_EXPR_ARG (exp, 2);
3686 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3687 location_t loc = EXPR_LOCATION (exp);
3689 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3690 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3692 /* If either argument isn't a pointer (so we have no alignment), just call the function. */
3693 if (arg1_align == 0 || arg2_align == 0)
3694 return NULL_RTX;
3696 rtx arg1_rtx = get_memory_rtx (arg1, len);
3697 rtx arg2_rtx = get_memory_rtx (arg2, len);
3698 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3700 /* Set MEM_SIZE as appropriate. */
3701 if (CONST_INT_P (len_rtx))
3703 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3704 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3707 by_pieces_constfn constfn = NULL;
3709 const char *src_str = c_getstr (arg1);
3710 if (src_str == NULL)
3711 src_str = c_getstr (arg2);
3712 else
3713 std::swap (arg1_rtx, arg2_rtx);
3715 /* If SRC is a string constant and the block comparison would be done
3716 by pieces, we can avoid loading the string from memory
3717 and instead supply its bytes as immediate constants. */
3718 if (src_str
3719 && CONST_INT_P (len_rtx)
3720 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3721 constfn = builtin_memcpy_read_str;
3723 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3724 TREE_TYPE (len), target,
3725 result_eq, constfn,
3726 CONST_CAST (char *, src_str));
3728 if (result)
3730 /* Return the value in the proper mode for this function. */
3731 if (GET_MODE (result) == mode)
3732 return result;
3734 if (target != 0)
3736 convert_move (target, result, 0);
3737 return target;
3740 return convert_to_mode (mode, result, 0);
3743 return NULL_RTX;
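/* E.g. for memcmp (buf, "abc", 4) with a constant length the bytes of
   "abc" (including the terminating NUL) are supplied as immediates by
   builtin_memcpy_read_str, so only BUF is loaded from memory.  */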
3746 /* Expand expression EXP, which is a call to the strcmp builtin. Return
3747 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3748 try to get the result in TARGET, if convenient. */
3750 static rtx
3751 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3753 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3754 return NULL_RTX;
3756 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3757 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3758 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3760 rtx arg1_rtx, arg2_rtx;
3761 tree fndecl, fn;
3762 tree arg1 = CALL_EXPR_ARG (exp, 0);
3763 tree arg2 = CALL_EXPR_ARG (exp, 1);
3764 rtx result = NULL_RTX;
3766 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3767 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3769 /* If either argument isn't a pointer (so we have no alignment), just call the function. */
3770 if (arg1_align == 0 || arg2_align == 0)
3771 return NULL_RTX;
3773 /* Stabilize the arguments in case gen_cmpstr(n)si fails. */
3774 arg1 = builtin_save_expr (arg1);
3775 arg2 = builtin_save_expr (arg2);
3777 arg1_rtx = get_memory_rtx (arg1, NULL);
3778 arg2_rtx = get_memory_rtx (arg2, NULL);
3780 /* Try to call cmpstrsi. */
3781 if (cmpstr_icode != CODE_FOR_nothing)
3782 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3783 MIN (arg1_align, arg2_align));
3785 /* Try to determine at least one length and call cmpstrnsi. */
3786 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3788 tree len;
3789 rtx arg3_rtx;
3791 tree len1 = c_strlen (arg1, 1);
3792 tree len2 = c_strlen (arg2, 1);
3794 if (len1)
3795 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3796 if (len2)
3797 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3799 /* If we don't have a constant length for the first, use the length
3800 of the second, if we know it. We don't require a constant for
3801 this case; some cost analysis could be done if both are available
3802 but neither is constant. For now, assume they're equally cheap,
3803 unless one has side effects. If both strings have constant lengths,
3804 use the smaller. */
3806 if (!len1)
3807 len = len2;
3808 else if (!len2)
3809 len = len1;
3810 else if (TREE_SIDE_EFFECTS (len1))
3811 len = len2;
3812 else if (TREE_SIDE_EFFECTS (len2))
3813 len = len1;
3814 else if (TREE_CODE (len1) != INTEGER_CST)
3815 len = len2;
3816 else if (TREE_CODE (len2) != INTEGER_CST)
3817 len = len1;
3818 else if (tree_int_cst_lt (len1, len2))
3819 len = len1;
3820 else
3821 len = len2;
3823 /* Proceed only if we know a length and it is free of side effects. */
3824 if (len && !TREE_SIDE_EFFECTS (len))
3826 arg3_rtx = expand_normal (len);
3827 result = expand_cmpstrn_or_cmpmem
3828 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3829 arg3_rtx, MIN (arg1_align, arg2_align));
3833 if (result)
3835 /* Return the value in the proper mode for this function. */
3836 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3837 if (GET_MODE (result) == mode)
3838 return result;
3839 if (target == 0)
3840 return convert_to_mode (mode, result, 0);
3841 convert_move (target, result, 0);
3842 return target;
3845 /* Expand the library call ourselves using a stabilized argument
3846 list to avoid re-evaluating the function's arguments twice. */
3847 fndecl = get_callee_fndecl (exp);
3848 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3849 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3850 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3851 return expand_call (fn, target, target == const0_rtx);
3853 return NULL_RTX;
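/* E.g. for strcmp (s, "hi"), LEN2 is 3 (strlen + 1), so the compare
   can be emitted as cmpstrnsi with length 3; the NUL terminating "hi"
   bounds the comparison safely.  */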
3856 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3857 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3858 try to get the result in TARGET, if convenient. */
3860 static rtx
3861 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3862 ATTRIBUTE_UNUSED machine_mode mode)
3864 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3866 if (!validate_arglist (exp,
3867 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3868 return NULL_RTX;
3870 /* If c_strlen can determine an expression for one of the string
3871 lengths, and it doesn't have side effects, then emit cmpstrnsi
3872 using length MIN(strlen(string)+1, arg3). */
3873 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3874 if (cmpstrn_icode != CODE_FOR_nothing)
3876 tree len, len1, len2;
3877 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3878 rtx result;
3879 tree fndecl, fn;
3880 tree arg1 = CALL_EXPR_ARG (exp, 0);
3881 tree arg2 = CALL_EXPR_ARG (exp, 1);
3882 tree arg3 = CALL_EXPR_ARG (exp, 2);
3884 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3885 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3887 len1 = c_strlen (arg1, 1);
3888 len2 = c_strlen (arg2, 1);
3890 if (len1)
3891 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3892 if (len2)
3893 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3895 /* If we don't have a constant length for the first, use the length
3896 of the second, if we know it. We don't require a constant for
3897 this case; some cost analysis could be done if both are available
3898 but neither is constant. For now, assume they're equally cheap,
3899 unless one has side effects. If both strings have constant lengths,
3900 use the smaller. */
3902 if (!len1)
3903 len = len2;
3904 else if (!len2)
3905 len = len1;
3906 else if (TREE_SIDE_EFFECTS (len1))
3907 len = len2;
3908 else if (TREE_SIDE_EFFECTS (len2))
3909 len = len1;
3910 else if (TREE_CODE (len1) != INTEGER_CST)
3911 len = len2;
3912 else if (TREE_CODE (len2) != INTEGER_CST)
3913 len = len1;
3914 else if (tree_int_cst_lt (len1, len2))
3915 len = len1;
3916 else
3917 len = len2;
3919 /* If no length is known, or the chosen one has side effects, punt. */
3920 if (!len || TREE_SIDE_EFFECTS (len))
3921 return NULL_RTX;
3923 /* The actual new length parameter is MIN(len,arg3). */
3924 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3925 fold_convert_loc (loc, TREE_TYPE (len), arg3));
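/* E.g. for strncmp (s, "hello", n) this yields MIN (6, n).  */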
3927 /* If either argument isn't a pointer (so we have no alignment), just call the function. */
3928 if (arg1_align == 0 || arg2_align == 0)
3929 return NULL_RTX;
3931 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3932 arg1 = builtin_save_expr (arg1);
3933 arg2 = builtin_save_expr (arg2);
3934 len = builtin_save_expr (len);
3936 arg1_rtx = get_memory_rtx (arg1, len);
3937 arg2_rtx = get_memory_rtx (arg2, len);
3938 arg3_rtx = expand_normal (len);
3939 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3940 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3941 MIN (arg1_align, arg2_align));
3942 if (result)
3944 /* Return the value in the proper mode for this function. */
3945 mode = TYPE_MODE (TREE_TYPE (exp));
3946 if (GET_MODE (result) == mode)
3947 return result;
3948 if (target == 0)
3949 return convert_to_mode (mode, result, 0);
3950 convert_move (target, result, 0);
3951 return target;
3954 /* Expand the library call ourselves using a stabilized argument
3955 list to avoid re-evaluating the function's arguments twice. */
3956 fndecl = get_callee_fndecl (exp);
3957 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3958 arg1, arg2, len);
3959 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3960 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3961 return expand_call (fn, target, target == const0_rtx);
3963 return NULL_RTX;
3966 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3967 if that's convenient. */
3969 rtx
3970 expand_builtin_saveregs (void)
3972 rtx val;
3973 rtx_insn *seq;
3975 /* Don't do __builtin_saveregs more than once in a function.
3976 Save the result of the first call and reuse it. */
3977 if (saveregs_value != 0)
3978 return saveregs_value;
3980 /* When this function is called, it means that registers must be
3981 saved on entry to this function. So we migrate the call to the
3982 first insn of this function. */
3984 start_sequence ();
3986 /* Do whatever the machine needs done in this case. */
3987 val = targetm.calls.expand_builtin_saveregs ();
3989 seq = get_insns ();
3990 end_sequence ();
3992 saveregs_value = val;
3994 /* Put the insns after the NOTE that starts the function. If this
3995 is inside a start_sequence, make the outer-level insn chain current, so
3996 the code is placed at the start of the function. */
3997 push_topmost_sequence ();
3998 emit_insn_after (seq, entry_of_function ());
3999 pop_topmost_sequence ();
4001 return val;
4004 /* Expand a call to __builtin_next_arg. */
4006 static rtx
4007 expand_builtin_next_arg (void)
4009 /* Checking arguments is already done in fold_builtin_next_arg,
4010 which must be called before this function. */
4011 return expand_binop (ptr_mode, add_optab,
4012 crtl->args.internal_arg_pointer,
4013 crtl->args.arg_offset_rtx,
4014 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4017 /* Make it easier for the backends by protecting the valist argument
4018 from multiple evaluations. */
4020 static tree
4021 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4023 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4025 /* The current way of determining the type of valist is completely
4026 bogus. We should have the information on the va builtin instead. */
4027 if (!vatype)
4028 vatype = targetm.fn_abi_va_list (cfun->decl);
4030 if (TREE_CODE (vatype) == ARRAY_TYPE)
4032 if (TREE_SIDE_EFFECTS (valist))
4033 valist = save_expr (valist);
4035 /* For this case, the backends will be expecting a pointer to
4036 vatype, but it's possible we've actually been given an array
4037 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4038 So fix it. */
4039 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4041 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4042 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4045 else
4047 tree pt = build_pointer_type (vatype);
4049 if (! needs_lvalue)
4051 if (! TREE_SIDE_EFFECTS (valist))
4052 return valist;
4054 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4055 TREE_SIDE_EFFECTS (valist) = 1;
4058 if (TREE_SIDE_EFFECTS (valist))
4059 valist = save_expr (valist);
4060 valist = fold_build2_loc (loc, MEM_REF,
4061 vatype, valist, build_int_cst (pt, 0));
4064 return valist;
4067 /* The "standard" definition of va_list is void*. */
4069 tree
4070 std_build_builtin_va_list (void)
4072 return ptr_type_node;
4075 /* The "standard" abi va_list is va_list_type_node. */
4077 tree
4078 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4080 return va_list_type_node;
4083 /* The "standard" type of va_list is va_list_type_node. */
4085 tree
4086 std_canonical_va_list_type (tree type)
4088 tree wtype, htype;
4090 if (INDIRECT_REF_P (type))
4091 type = TREE_TYPE (type);
4092 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4093 type = TREE_TYPE (type);
4094 wtype = va_list_type_node;
4095 htype = type;
4096 /* Handle structure va_list types. */
4097 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4098 htype = TREE_TYPE (htype);
4099 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4101 /* If va_list is an array type, the argument may have decayed
4102 to a pointer type, e.g. by being passed to another function.
4103 In that case, unwrap both types so that we can compare the
4104 underlying records. */
4105 if (TREE_CODE (htype) == ARRAY_TYPE
4106 || POINTER_TYPE_P (htype))
4108 wtype = TREE_TYPE (wtype);
4109 htype = TREE_TYPE (htype);
4112 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4113 return va_list_type_node;
4115 return NULL_TREE;
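/* For example, on targets where va_list is an array of one record
   (such as x86-64's __va_list_tag[1]), the argument may arrive here
   either as the array type or decayed to __va_list_tag *; both unwrap
   to the same record and match va_list_type_node.  */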
4118 /* The "standard" implementation of va_start: just assign `nextarg' to
4119 the variable. */
4121 void
4122 std_expand_builtin_va_start (tree valist, rtx nextarg)
4124 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4125 convert_move (va_r, nextarg, 0);
4127 /* We do not have any valid bounds for the pointer, so
4128 just store zero bounds for it. */
4129 if (chkp_function_instrumented_p (current_function_decl))
4130 chkp_expand_bounds_reset_for_mem (valist,
4131 make_tree (TREE_TYPE (valist),
4132 nextarg));
4135 /* Expand EXP, a call to __builtin_va_start. */
4137 static rtx
4138 expand_builtin_va_start (tree exp)
4140 rtx nextarg;
4141 tree valist;
4142 location_t loc = EXPR_LOCATION (exp);
4144 if (call_expr_nargs (exp) < 2)
4146 error_at (loc, "too few arguments to function %<va_start%>");
4147 return const0_rtx;
4150 if (fold_builtin_next_arg (exp, true))
4151 return const0_rtx;
4153 nextarg = expand_builtin_next_arg ();
4154 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4156 if (targetm.expand_builtin_va_start)
4157 targetm.expand_builtin_va_start (valist, nextarg);
4158 else
4159 std_expand_builtin_va_start (valist, nextarg);
4161 return const0_rtx;
4164 /* Expand EXP, a call to __builtin_va_end. */
4166 static rtx
4167 expand_builtin_va_end (tree exp)
4169 tree valist = CALL_EXPR_ARG (exp, 0);
4171 /* Evaluate for side effects, if needed. I hate macros that don't
4172 do that. */
4173 if (TREE_SIDE_EFFECTS (valist))
4174 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4176 return const0_rtx;
4179 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4180 builtin rather than just as an assignment in stdarg.h because of the
4181 nastiness of array-type va_list types. */
4183 static rtx
4184 expand_builtin_va_copy (tree exp)
4186 tree dst, src, t;
4187 location_t loc = EXPR_LOCATION (exp);
4189 dst = CALL_EXPR_ARG (exp, 0);
4190 src = CALL_EXPR_ARG (exp, 1);
4192 dst = stabilize_va_list_loc (loc, dst, 1);
4193 src = stabilize_va_list_loc (loc, src, 0);
4195 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4197 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4199 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4200 TREE_SIDE_EFFECTS (t) = 1;
4201 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4203 else
4205 rtx dstb, srcb, size;
4207 /* Evaluate to pointers. */
4208 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4209 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4210 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4211 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4213 dstb = convert_memory_address (Pmode, dstb);
4214 srcb = convert_memory_address (Pmode, srcb);
4216 /* "Dereference" to BLKmode memories. */
4217 dstb = gen_rtx_MEM (BLKmode, dstb);
4218 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4219 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4220 srcb = gen_rtx_MEM (BLKmode, srcb);
4221 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4222 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4224 /* Copy. */
4225 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4228 return const0_rtx;
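/* Thus for an array-type va_list the copy is a block move of the
   whole record, whereas for pointer-style va_lists va_copy (d, s)
   degenerates to the plain assignment in the first branch.  */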
4231 /* Expand a call to one of the builtin functions __builtin_frame_address or
4232 __builtin_return_address. */
4234 static rtx
4235 expand_builtin_frame_address (tree fndecl, tree exp)
4237 /* The argument must be a nonnegative integer constant.
4238 It counts the number of frames to scan up the stack.
4239 The value is either the frame pointer value or the return
4240 address saved in that frame. */
4241 if (call_expr_nargs (exp) == 0)
4242 /* Warning about missing arg was already issued. */
4243 return const0_rtx;
4244 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4246 error ("invalid argument to %qD", fndecl);
4247 return const0_rtx;
4249 else
4251 /* Number of frames to scan up the stack. */
4252 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4254 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4256 /* Some ports cannot access arbitrary stack frames. */
4257 if (tem == NULL)
4259 warning (0, "unsupported argument to %qD", fndecl);
4260 return const0_rtx;
4263 if (count)
4265 /* Warn since no effort is made to ensure that any frame
4266 beyond the current one exists or can be safely reached. */
4267 warning (OPT_Wframe_address, "calling %qD with "
4268 "a nonzero argument is unsafe", fndecl);
4271 /* For __builtin_frame_address, return what we've got. */
4272 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4273 return tem;
4275 if (!REG_P (tem)
4276 && ! CONSTANT_P (tem))
4277 tem = copy_addr_to_reg (tem);
4278 return tem;
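/* E.g. __builtin_return_address (0) yields the address the current
   function will return to; any nonzero COUNT walks further up the
   stack and triggers the -Wframe-address warning above.  */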
4282 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4283 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4284 is the same as for allocate_dynamic_stack_space. */
4286 static rtx
4287 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4289 rtx op0;
4290 rtx result;
4291 bool valid_arglist;
4292 unsigned int align;
4293 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4294 == BUILT_IN_ALLOCA_WITH_ALIGN);
4296 valid_arglist
4297 = (alloca_with_align
4298 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4299 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4301 if (!valid_arglist)
4302 return NULL_RTX;
4304 /* Compute the argument. */
4305 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4307 /* Compute the alignment. */
4308 align = (alloca_with_align
4309 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4310 : BIGGEST_ALIGNMENT);
4312 /* Allocate the desired space. */
4313 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4314 result = convert_memory_address (ptr_mode, result);
4316 return result;
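/* E.g. __builtin_alloca_with_align (n, 256) allocates N bytes with a
   requested alignment of 256 bits (32 bytes); plain alloca implicitly
   requests BIGGEST_ALIGNMENT.  */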
4319 /* Expand a call to bswap builtin in EXP.
4320 Return NULL_RTX if a normal call should be emitted rather than expanding the
4321 function in-line. If convenient, the result should be placed in TARGET.
4322 SUBTARGET may be used as the target for computing one of EXP's operands. */
4324 static rtx
4325 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4326 rtx subtarget)
4328 tree arg;
4329 rtx op0;
4331 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4332 return NULL_RTX;
4334 arg = CALL_EXPR_ARG (exp, 0);
4335 op0 = expand_expr (arg,
4336 subtarget && GET_MODE (subtarget) == target_mode
4337 ? subtarget : NULL_RTX,
4338 target_mode, EXPAND_NORMAL);
4339 if (GET_MODE (op0) != target_mode)
4340 op0 = convert_to_mode (target_mode, op0, 1);
4342 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4344 gcc_assert (target);
4346 return convert_to_mode (target_mode, target, 1);
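/* E.g. __builtin_bswap32 (0x12345678) yields 0x78563412 via the
   bswap optab.  */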
4349 /* Expand a call to a unary builtin in EXP.
4350 Return NULL_RTX if a normal call should be emitted rather than expanding the
4351 function in-line. If convenient, the result should be placed in TARGET.
4352 SUBTARGET may be used as the target for computing one of EXP's operands. */
4354 static rtx
4355 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4356 rtx subtarget, optab op_optab)
4358 rtx op0;
4360 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4361 return NULL_RTX;
4363 /* Compute the argument. */
4364 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4365 (subtarget
4366 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4367 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4368 VOIDmode, EXPAND_NORMAL);
4369 /* Compute op, into TARGET if possible.
4370 Set TARGET to wherever the result comes back. */
4371 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4372 op_optab, op0, target, op_optab != clrsb_optab);
4373 gcc_assert (target);
4375 return convert_to_mode (target_mode, target, 0);
4378 /* Expand a call to __builtin_expect. We just return our argument
4379 because the builtin_expect semantics should already have been handled
4380 by the tree branch-prediction pass. */
4382 static rtx
4383 expand_builtin_expect (tree exp, rtx target)
4385 tree arg;
4387 if (call_expr_nargs (exp) < 2)
4388 return const0_rtx;
4389 arg = CALL_EXPR_ARG (exp, 0);
4391 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4392 /* When guessing was done, the hints should be already stripped away. */
4393 gcc_assert (!flag_guess_branch_prob
4394 || optimize == 0 || seen_error ());
4395 return target;
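/* So if (__builtin_expect (x, 0)) expands here simply as if (x); the
   probability hint was consumed earlier when edge profiles were
   estimated.  */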
4398 /* Expand a call to __builtin_assume_aligned. We just return our first
4399 argument because the builtin_assume_aligned semantics should already
4400 have been handled by CCP. */
4402 static rtx
4403 expand_builtin_assume_aligned (tree exp, rtx target)
4405 if (call_expr_nargs (exp) < 2)
4406 return const0_rtx;
4407 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4408 EXPAND_NORMAL);
4409 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4410 && (call_expr_nargs (exp) < 3
4411 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4412 return target;
4415 void
4416 expand_builtin_trap (void)
4418 if (targetm.have_trap ())
4420 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4421 /* For trap insns when not accumulating outgoing args force
4422 REG_ARGS_SIZE note to prevent crossjumping of calls with
4423 different args sizes. */
4424 if (!ACCUMULATE_OUTGOING_ARGS)
4425 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4427 else
4429 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4430 tree call_expr = build_call_expr (fn, 0);
4431 expand_call (call_expr, NULL_RTX, false);
4434 emit_barrier ();
4437 /* Expand a call to __builtin_unreachable. We do nothing except emit
4438 a barrier saying that control flow will not pass here.
4440 It is the responsibility of the program being compiled to ensure
4441 that control flow never reaches __builtin_unreachable. */
4442 static void
4443 expand_builtin_unreachable (void)
4445 emit_barrier ();
4448 /* Expand EXP, a call to fabs, fabsf or fabsl.
4449 Return NULL_RTX if a normal call should be emitted rather than expanding
4450 the function inline. If convenient, the result should be placed
4451 in TARGET. SUBTARGET may be used as the target for computing
4452 the operand. */
4454 static rtx
4455 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4457 machine_mode mode;
4458 tree arg;
4459 rtx op0;
4461 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4462 return NULL_RTX;
4464 arg = CALL_EXPR_ARG (exp, 0);
4465 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4466 mode = TYPE_MODE (TREE_TYPE (arg));
4467 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4468 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4471 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4472 Return NULL_RTX if a normal call should be emitted rather than expanding the
4473 function inline. If convenient, the result should be placed in TARGET.
4474 SUBTARGET may be used as the target for computing the operand. */
4476 static rtx
4477 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4479 rtx op0, op1;
4480 tree arg;
4482 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4483 return NULL_RTX;
4485 arg = CALL_EXPR_ARG (exp, 0);
4486 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4488 arg = CALL_EXPR_ARG (exp, 1);
4489 op1 = expand_normal (arg);
4491 return expand_copysign (op0, op1, target);
4494 /* Expand a call to __builtin___clear_cache. */
4496 static rtx
4497 expand_builtin___clear_cache (tree exp)
4499 if (!targetm.code_for_clear_cache)
4501 #ifdef CLEAR_INSN_CACHE
4502 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4503 does something. Just do the default expansion to a call to
4504 __clear_cache(). */
4505 return NULL_RTX;
4506 #else
4507 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4508 does nothing. There is no need to call it. Do nothing. */
4509 return const0_rtx;
4510 #endif /* CLEAR_INSN_CACHE */
4513 /* We have a "clear_cache" insn, and it will handle everything. */
4514 tree begin, end;
4515 rtx begin_rtx, end_rtx;
4517 /* We must not expand to a library call. If we did, any
4518 fallback library function in libgcc that might contain a call to
4519 __builtin___clear_cache() would recurse infinitely. */
4520 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4522 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4523 return const0_rtx;
4526 if (targetm.have_clear_cache ())
4528 struct expand_operand ops[2];
4530 begin = CALL_EXPR_ARG (exp, 0);
4531 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4533 end = CALL_EXPR_ARG (exp, 1);
4534 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4536 create_address_operand (&ops[0], begin_rtx);
4537 create_address_operand (&ops[1], end_rtx);
4538 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4539 return const0_rtx;
4541 return const0_rtx;
4544 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4546 static rtx
4547 round_trampoline_addr (rtx tramp)
4549 rtx temp, addend, mask;
4551 /* If we don't need too much alignment, we'll have been guaranteed
4552 proper alignment by get_trampoline_type. */
4553 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4554 return tramp;
4556 /* Round address up to desired boundary. */
4557 temp = gen_reg_rtx (Pmode);
4558 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4559 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4561 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4562 temp, 0, OPTAB_LIB_WIDEN);
4563 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4564 temp, 0, OPTAB_LIB_WIDEN);
4566 return tramp;
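/* E.g. with a TRAMPOLINE_ALIGNMENT of 128 bits this computes
   (tramp + 15) & -16.  */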
4569 static rtx
4570 expand_builtin_init_trampoline (tree exp, bool onstack)
4572 tree t_tramp, t_func, t_chain;
4573 rtx m_tramp, r_tramp, r_chain, tmp;
4575 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4576 POINTER_TYPE, VOID_TYPE))
4577 return NULL_RTX;
4579 t_tramp = CALL_EXPR_ARG (exp, 0);
4580 t_func = CALL_EXPR_ARG (exp, 1);
4581 t_chain = CALL_EXPR_ARG (exp, 2);
4583 r_tramp = expand_normal (t_tramp);
4584 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4585 MEM_NOTRAP_P (m_tramp) = 1;
4587 /* If ONSTACK, the TRAMP argument should be the address of a field
4588 within the local function's FRAME decl. Either way, let's see if
4589 we can fill in the MEM_ATTRs for this memory. */
4590 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4591 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4593 /* Creator of a heap trampoline is responsible for making sure the
4594 address is aligned to at least STACK_BOUNDARY. Normally malloc
4595 will ensure this anyhow. */
4596 tmp = round_trampoline_addr (r_tramp);
4597 if (tmp != r_tramp)
4599 m_tramp = change_address (m_tramp, BLKmode, tmp);
4600 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4601 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4604 /* The FUNC argument should be the address of the nested function.
4605 Extract the actual function decl to pass to the hook. */
4606 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4607 t_func = TREE_OPERAND (t_func, 0);
4608 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4610 r_chain = expand_normal (t_chain);
4612 /* Generate insns to initialize the trampoline. */
4613 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4615 if (onstack)
4617 trampolines_created = 1;
4619 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4620 "trampoline generated for nested function %qD", t_func);
4623 return const0_rtx;
4626 static rtx
4627 expand_builtin_adjust_trampoline (tree exp)
4629 rtx tramp;
4631 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4632 return NULL_RTX;
4634 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4635 tramp = round_trampoline_addr (tramp);
4636 if (targetm.calls.trampoline_adjust_address)
4637 tramp = targetm.calls.trampoline_adjust_address (tramp);
4639 return tramp;
4642 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4643 function. The function first checks whether the back end provides
4644 an insn to implement signbit for the respective mode. If not, it
4645 checks whether the floating point format of the value is such that
4646 the sign bit can be extracted. If that is not the case, error out.
4647 EXP is the expression that is a call to the builtin function; if
4648 convenient, the result should be placed in TARGET. */
4649 static rtx
4650 expand_builtin_signbit (tree exp, rtx target)
4652 const struct real_format *fmt;
4653 machine_mode fmode, imode, rmode;
4654 tree arg;
4655 int word, bitpos;
4656 enum insn_code icode;
4657 rtx temp;
4658 location_t loc = EXPR_LOCATION (exp);
4660 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4661 return NULL_RTX;
4663 arg = CALL_EXPR_ARG (exp, 0);
4664 fmode = TYPE_MODE (TREE_TYPE (arg));
4665 rmode = TYPE_MODE (TREE_TYPE (exp));
4666 fmt = REAL_MODE_FORMAT (fmode);
4668 arg = builtin_save_expr (arg);
4670 /* Expand the argument, yielding an RTX expression. */
4671 temp = expand_normal (arg);
4673 /* Check if the back end provides an insn that handles signbit for the
4674 argument's mode. */
4675 icode = optab_handler (signbit_optab, fmode);
4676 if (icode != CODE_FOR_nothing)
4678 rtx_insn *last = get_last_insn ();
4679 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4680 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4681 return target;
4682 delete_insns_since (last);
4685 /* For floating point formats without a sign bit, implement signbit
4686 as "ARG < 0.0". */
4687 bitpos = fmt->signbit_ro;
4688 if (bitpos < 0)
4690 /* But we can't do this if the format supports signed zero. */
4691 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4693 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4694 build_real (TREE_TYPE (arg), dconst0));
4695 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4698 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4700 imode = int_mode_for_mode (fmode);
4701 gcc_assert (imode != BLKmode);
4702 temp = gen_lowpart (imode, temp);
4704 else
4706 imode = word_mode;
4707 /* Handle targets with different FP word orders. */
4708 if (FLOAT_WORDS_BIG_ENDIAN)
4709 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4710 else
4711 word = bitpos / BITS_PER_WORD;
4712 temp = operand_subword_force (temp, word, fmode);
4713 bitpos = bitpos % BITS_PER_WORD;
4716 /* Force the intermediate word_mode (or narrower) result into a
4717 register. This avoids attempting to create paradoxical SUBREGs
4718 of floating point modes below. */
4719 temp = force_reg (imode, temp);
4721 /* If the bitpos is within the "result mode" lowpart, the operation
4722 can be implemented with a single bitwise AND. Otherwise, we need
4723 a right shift and an AND. */
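/* E.g. for an IEEE single with a 32-bit result mode, BITPOS is 31 and
   one AND with 0x80000000 suffices; for an IEEE double on a 64-bit
   target with a 32-bit result mode, the sign bit is shifted down from
   bit 63 and masked with 1.  */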
4725 if (bitpos < GET_MODE_BITSIZE (rmode))
4727 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4729 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4730 temp = gen_lowpart (rmode, temp);
4731 temp = expand_binop (rmode, and_optab, temp,
4732 immed_wide_int_const (mask, rmode),
4733 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4735 else
4737 /* Perform a logical right shift to place the signbit in the least
4738 significant bit, then truncate the result to the desired mode
4739 and mask just this bit. */
4740 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4741 temp = gen_lowpart (rmode, temp);
4742 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4743 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4746 return temp;
4749 /* Expand fork or exec calls. TARGET is the desired target of the
4750 call. EXP is the call. FN is the identifier of the actual
4751 function. IGNORE is nonzero if the value is to be ignored. */
4754 static rtx
4755 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4757 tree id, decl;
4758 tree call;
4760 /* If we are not profiling, just call the function. */
4761 if (!profile_arc_flag)
4762 return NULL_RTX;
4764 /* Otherwise call the wrapper. This should be equivalent for the rest of the
4765 compiler, so the code does not diverge, and the wrapper may run the
4766 code necessary for keeping the profiling sane. */
4768 switch (DECL_FUNCTION_CODE (fn))
4770 case BUILT_IN_FORK:
4771 id = get_identifier ("__gcov_fork");
4772 break;
4774 case BUILT_IN_EXECL:
4775 id = get_identifier ("__gcov_execl");
4776 break;
4778 case BUILT_IN_EXECV:
4779 id = get_identifier ("__gcov_execv");
4780 break;
4782 case BUILT_IN_EXECLP:
4783 id = get_identifier ("__gcov_execlp");
4784 break;
4786 case BUILT_IN_EXECLE:
4787 id = get_identifier ("__gcov_execle");
4788 break;
4790 case BUILT_IN_EXECVP:
4791 id = get_identifier ("__gcov_execvp");
4792 break;
4794 case BUILT_IN_EXECVE:
4795 id = get_identifier ("__gcov_execve");
4796 break;
4798 default:
4799 gcc_unreachable ();
4802 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4803 FUNCTION_DECL, id, TREE_TYPE (fn));
4804 DECL_EXTERNAL (decl) = 1;
4805 TREE_PUBLIC (decl) = 1;
4806 DECL_ARTIFICIAL (decl) = 1;
4807 TREE_NOTHROW (decl) = 1;
4808 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4809 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4810 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4811 return expand_call (call, target, ignore);
4816 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4817 the pointer in these functions is void*, the tree optimizers may remove
4818 casts. The mode computed in expand_builtin isn't reliable either, due
4819 to __sync_bool_compare_and_swap.
4821 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4822 group of builtins. This gives us log2 of the mode size. */
4824 static inline machine_mode
4825 get_builtin_sync_mode (int fcode_diff)
4827 /* The size is not negotiable, so ask not to get BLKmode in return
4828 if the target indicates that a smaller size would be better. */
4829 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
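/* E.g. __sync_fetch_and_add_4 has FCODE_DIFF 2, selecting the
   (8 << 2) = 32-bit integer mode.  */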
4832 /* Expand the memory expression LOC and return the appropriate memory operand
4833 for the builtin_sync operations. */
4835 static rtx
4836 get_builtin_sync_mem (tree loc, machine_mode mode)
4838 rtx addr, mem;
4840 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4841 addr = convert_memory_address (Pmode, addr);
4843 /* Note that we explicitly do not want any alias information for this
4844 memory, so that we kill all other live memories. Otherwise we don't
4845 satisfy the full barrier semantics of the intrinsic. */
4846 mem = validize_mem (gen_rtx_MEM (mode, addr));
4848 /* The alignment needs to be at least that of the mode. */
4849 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4850 get_pointer_alignment (loc)));
4851 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4852 MEM_VOLATILE_P (mem) = 1;
4854 return mem;
4857 /* Make sure an argument is in the right mode.
4858 EXP is the tree argument.
4859 MODE is the mode it should be in. */
4861 static rtx
4862 expand_expr_force_mode (tree exp, machine_mode mode)
4864 rtx val;
4865 machine_mode old_mode;
4867 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4868 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4869 of CONST_INTs, where we know the old_mode only from the call argument. */
4871 old_mode = GET_MODE (val);
4872 if (old_mode == VOIDmode)
4873 old_mode = TYPE_MODE (TREE_TYPE (exp));
4874 val = convert_modes (mode, old_mode, val, 1);
4875 return val;
4879 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4880 EXP is the CALL_EXPR. CODE is the rtx code
4881 that corresponds to the arithmetic or logical operation from the name;
4882 an exception here is that NOT actually means NAND. TARGET is an optional
4883 place for us to store the results; AFTER is true if this is the
4884 xxx_and_fetch form, i.e. the result of the operation is returned. */
4886 static rtx
4887 expand_builtin_sync_operation (machine_mode mode, tree exp,
4888 enum rtx_code code, bool after,
4889 rtx target)
4891 rtx val, mem;
4892 location_t loc = EXPR_LOCATION (exp);
4894 if (code == NOT && warn_sync_nand)
4896 tree fndecl = get_callee_fndecl (exp);
4897 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4899 static bool warned_f_a_n, warned_n_a_f;
4901 switch (fcode)
4903 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4904 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4905 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4906 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4907 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4908 if (warned_f_a_n)
4909 break;
4911 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4912 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4913 warned_f_a_n = true;
4914 break;
4916 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4917 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4918 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4919 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4920 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4921 if (warned_n_a_f)
4922 break;
4924 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4925 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4926 warned_n_a_f = true;
4927 break;
4929 default:
4930 gcc_unreachable ();
4934 /* Expand the operands. */
4935 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4936 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4938 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4939 after);
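/* E.g. __sync_fetch_and_add (p, 1) arrives here with CODE == PLUS and
   AFTER false (the old value is returned), while
   __sync_add_and_fetch (p, 1) has AFTER true.  */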
4942 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4943 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4944 true if this is the boolean form. TARGET is a place for us to store the
4945 results; this is NOT optional if IS_BOOL is true. */
4947 static rtx
4948 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4949 bool is_bool, rtx target)
4951 rtx old_val, new_val, mem;
4952 rtx *pbool, *poval;
4954 /* Expand the operands. */
4955 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4956 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4957 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4959 pbool = poval = NULL;
4960 if (target != const0_rtx)
4962 if (is_bool)
4963 pbool = &target;
4964 else
4965 poval = &target;
4967 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4968 false, MEMMODEL_SYNC_SEQ_CST,
4969 MEMMODEL_SYNC_SEQ_CST))
4970 return NULL_RTX;
4972 return target;
4975 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4976 general form is actually an atomic exchange, and some targets only
4977 support a reduced form with the second argument being a constant 1.
4978 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4979 the results. */
4981 static rtx
4982 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4983 rtx target)
4985 rtx val, mem;
4987 /* Expand the operands. */
4988 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4989 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4991 return expand_sync_lock_test_and_set (target, mem, val);
4994 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4996 static void
4997 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4999 rtx mem;
5001 /* Expand the operands. */
5002 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5004 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5007 /* Given an integer representing an ``enum memmodel'', verify its
5008 correctness and return the memory model enum. */
5010 static enum memmodel
5011 get_memmodel (tree exp)
5013 rtx op;
5014 unsigned HOST_WIDE_INT val;
5015 source_location loc
5016 = expansion_point_location_if_in_system_header (input_location);
5018 /* If the parameter is not a constant, it's a run time value so we'll just
5019 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5020 if (TREE_CODE (exp) != INTEGER_CST)
5021 return MEMMODEL_SEQ_CST;
5023 op = expand_normal (exp);
5025 val = INTVAL (op);
5026 if (targetm.memmodel_check)
5027 val = targetm.memmodel_check (val);
5028 else if (val & ~MEMMODEL_MASK)
5030 warning_at (loc, OPT_Winvalid_memory_model,
5031 "unknown architecture specifier in memory model to builtin");
5032 return MEMMODEL_SEQ_CST;
5035 /* Should never see a user-explicit SYNC memmodel, so >= LAST works. */
5036 if (memmodel_base (val) >= MEMMODEL_LAST)
5038 warning_at (loc, OPT_Winvalid_memory_model,
5039 "invalid memory model argument to builtin");
5040 return MEMMODEL_SEQ_CST;
5043 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5044 be conservative and promote consume to acquire. */
5045 if (val == MEMMODEL_CONSUME)
5046 val = MEMMODEL_ACQUIRE;
5048 return (enum memmodel) val;
5051 /* Expand the __atomic_exchange intrinsic:
5052 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5053 EXP is the CALL_EXPR.
5054 TARGET is an optional place for us to store the results. */
5056 static rtx
5057 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5059 rtx val, mem;
5060 enum memmodel model;
5062 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5064 if (!flag_inline_atomics)
5065 return NULL_RTX;
5067 /* Expand the operands. */
5068 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5069 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5071 return expand_atomic_exchange (target, mem, val, model);
5074 /* Expand the __atomic_compare_exchange intrinsic:
5075 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5076 TYPE desired, BOOL weak,
5077 enum memmodel success,
5078 enum memmodel failure)
5079 EXP is the CALL_EXPR.
5080 TARGET is an optional place for us to store the results. */
5082 static rtx
5083 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5084 rtx target)
5086 rtx expect, desired, mem, oldval;
5087 rtx_code_label *label;
5088 enum memmodel success, failure;
5089 tree weak;
5090 bool is_weak;
5091 source_location loc
5092 = expansion_point_location_if_in_system_header (input_location);
5094 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5095 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5097 if (failure > success)
5099 warning_at (loc, OPT_Winvalid_memory_model,
5100 "failure memory model cannot be stronger than success "
5101 "memory model for %<__atomic_compare_exchange%>");
5102 success = MEMMODEL_SEQ_CST;
5105 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5107 warning_at (loc, OPT_Winvalid_memory_model,
5108 "invalid failure memory model for "
5109 "%<__atomic_compare_exchange%>");
5110 failure = MEMMODEL_SEQ_CST;
5111 success = MEMMODEL_SEQ_CST;
5115 if (!flag_inline_atomics)
5116 return NULL_RTX;
5118 /* Expand the operands. */
5119 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5121 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5122 expect = convert_memory_address (Pmode, expect);
5123 expect = gen_rtx_MEM (mode, expect);
5124 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5126 weak = CALL_EXPR_ARG (exp, 3);
5127 is_weak = false;
5128 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5129 is_weak = true;
5131 if (target == const0_rtx)
5132 target = NULL;
5134 /* Lest the rtl backend create a race condition with an improper store
5135 to memory, always create a new pseudo for OLDVAL. */
5136 oldval = NULL;
5138 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5139 is_weak, success, failure))
5140 return NULL_RTX;
5142 /* Conditionally store back to EXPECT, lest we create a race condition
5143 with an improper store to memory. */
5144 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5145 the normal case where EXPECT is totally private, i.e. a register. At
5146 which point the store can be unconditional. */
5147 label = gen_label_rtx ();
5148 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5149 GET_MODE (target), 1, label);
5150 emit_move_insn (expect, oldval);
5151 emit_label (label);
5153 return target;
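/* A typical source-level loop this expands (a sketch):

     long old = *p;
     while (!__atomic_compare_exchange_n (p, &old, old + 1, 0,
                                          __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
       ;

   On failure the value read from MEM is stored back into OLD by the
   conditional move above, so the loop retries with the fresh value.  */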
5156 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5157 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5158 call. The weak parameter must be dropped to match the expected parameter
5159 list, and the expected argument changed from a value to a pointer to a
5160 memory slot. */
5162 static void
5163 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5165 unsigned int z;
5166 vec<tree, va_gc> *vec;
5168 vec_alloc (vec, 5);
5169 vec->quick_push (gimple_call_arg (call, 0));
5170 tree expected = gimple_call_arg (call, 1);
5171 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5172 TREE_TYPE (expected));
5173 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5174 if (expd != x)
5175 emit_move_insn (x, expd);
5176 tree v = make_tree (TREE_TYPE (expected), x);
5177 vec->quick_push (build1 (ADDR_EXPR,
5178 build_pointer_type (TREE_TYPE (expected)), v));
5179 vec->quick_push (gimple_call_arg (call, 2));
5180 /* Skip the boolean weak parameter. */
5181 for (z = 4; z < 6; z++)
5182 vec->quick_push (gimple_call_arg (call, z));
5183 built_in_function fncode
5184 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5185 + exact_log2 (GET_MODE_SIZE (mode)));
5186 tree fndecl = builtin_decl_explicit (fncode);
5187 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5188 fndecl);
5189 tree exp = build_call_vec (boolean_type_node, fn, vec);
5190 tree lhs = gimple_call_lhs (call);
5191 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5192 if (lhs)
5194 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5195 if (GET_MODE (boolret) != mode)
5196 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5197 x = force_reg (mode, x);
5198 write_complex_part (target, boolret, true);
5199 write_complex_part (target, x, false);
5203 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5205 void
5206 expand_ifn_atomic_compare_exchange (gcall *call)
5208 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5209 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5210 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5211 rtx expect, desired, mem, oldval, boolret;
5212 enum memmodel success, failure;
5213 tree lhs;
5214 bool is_weak;
5215 source_location loc
5216 = expansion_point_location_if_in_system_header (gimple_location (call));
5218 success = get_memmodel (gimple_call_arg (call, 4));
5219 failure = get_memmodel (gimple_call_arg (call, 5));
5221 if (failure > success)
5223 warning_at (loc, OPT_Winvalid_memory_model,
5224 "failure memory model cannot be stronger than success "
5225 "memory model for %<__atomic_compare_exchange%>");
5226 success = MEMMODEL_SEQ_CST;
5229 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5231 warning_at (loc, OPT_Winvalid_memory_model,
5232 "invalid failure memory model for "
5233 "%<__atomic_compare_exchange%>");
5234 failure = MEMMODEL_SEQ_CST;
5235 success = MEMMODEL_SEQ_CST;
5238 if (!flag_inline_atomics)
5240 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5241 return;
5244 /* Expand the operands. */
5245 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5247 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5248 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5250 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5252 boolret = NULL;
5253 oldval = NULL;
5255 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5256 is_weak, success, failure))
5258 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5259 return;
5262 lhs = gimple_call_lhs (call);
5263 if (lhs)
5265 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5266 if (GET_MODE (boolret) != mode)
5267 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5268 write_complex_part (target, boolret, true);
5269 write_complex_part (target, oldval, false);
5273 /* Expand the __atomic_load intrinsic:
5274 TYPE __atomic_load (TYPE *object, enum memmodel)
5275 EXP is the CALL_EXPR.
5276 TARGET is an optional place for us to store the results. */
5278 static rtx
5279 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5281 rtx mem;
5282 enum memmodel model;
5284 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5285 if (is_mm_release (model) || is_mm_acq_rel (model))
5287 source_location loc
5288 = expansion_point_location_if_in_system_header (input_location);
5289 warning_at (loc, OPT_Winvalid_memory_model,
5290 "invalid memory model for %<__atomic_load%>");
5291 model = MEMMODEL_SEQ_CST;
5294 if (!flag_inline_atomics)
5295 return NULL_RTX;
5297 /* Expand the operand. */
5298 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5300 return expand_atomic_load (target, mem, model);
5304 /* Expand the __atomic_store intrinsic:
5305 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5306 EXP is the CALL_EXPR.
5307 TARGET is an optional place for us to store the results. */
5309 static rtx
5310 expand_builtin_atomic_store (machine_mode mode, tree exp)
5312 rtx mem, val;
5313 enum memmodel model;
5315 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5316 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5317 || is_mm_release (model)))
5319 source_location loc
5320 = expansion_point_location_if_in_system_header (input_location);
5321 warning_at (loc, OPT_Winvalid_memory_model,
5322 "invalid memory model for %<__atomic_store%>");
5323 model = MEMMODEL_SEQ_CST;
5326 if (!flag_inline_atomics)
5327 return NULL_RTX;
5329 /* Expand the operands. */
5330 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5331 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5333 return expand_atomic_store (mem, val, model, false);
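/* Illustrative sketch (editorial addition): the store-side counterpart;
   only RELAXED, RELEASE and SEQ_CST pass the check above.  */
#if 0
static void
publish (long *p, long v)
{
  __atomic_store_n (p, v, __ATOMIC_RELEASE);
}
#endif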
5336 /* Expand the __atomic_fetch_XXX intrinsic:
5337 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5338 EXP is the CALL_EXPR.
5339 TARGET is an optional place for us to store the results.
5340 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5341 FETCH_AFTER is true if the result of the operation is returned,
5342 and false if the value before the operation is returned.
5343 IGNORE is true if the result is not used.
5344 EXT_CALL is the correct builtin for an external call if this cannot be
5345 resolved to an instruction sequence. */
5347 static rtx
5348 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5349 enum rtx_code code, bool fetch_after,
5350 bool ignore, enum built_in_function ext_call)
5352 rtx val, mem, ret;
5353 enum memmodel model;
5354 tree fndecl;
5355 tree addr;
5357 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5359 /* Expand the operands. */
5360 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5361 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5363 /* Only try generating instructions if inlining is turned on. */
5364 if (flag_inline_atomics)
5366 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5367 if (ret)
5368 return ret;
5371 /* If no alternate routine is needed for the library call, give up. */
5372 if (ext_call == BUILT_IN_NONE)
5373 return NULL_RTX;
5375 /* Change the call to the specified function. */
5376 fndecl = get_callee_fndecl (exp);
5377 addr = CALL_EXPR_FN (exp);
5378 STRIP_NOPS (addr);
5380 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5381 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5383 /* Expand the call here so we can emit trailing code. */
5384 ret = expand_call (exp, target, ignore);
5386 /* Replace the original function just in case it matters. */
5387 TREE_OPERAND (addr, 0) = fndecl;
5389 /* Then issue the arithmetic correction to return the right result. */
5390 if (!ignore)
5392 if (code == NOT)
5394 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5395 OPTAB_LIB_WIDEN);
5396 ret = expand_simple_unop (mode, NOT, ret, target, true);
5398 else
5399 ret = expand_simple_binop (mode, code, ret, val, target, true,
5400 OPTAB_LIB_WIDEN);
5402 return ret;
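/* Illustrative sketch (editorial addition): the arithmetic correction above
   relies on identities like the one below; for NAND, the *_nand_fetch
   result is recovered from the fetch-before value returned by the external
   __atomic_fetch_nand call as ~(ret & val).  */
#if 0
static unsigned
nand_fetch_by_hand (unsigned *p, unsigned val)
{
  unsigned before = __atomic_fetch_nand (p, val, __ATOMIC_SEQ_CST);
  return ~(before & val);	/* == __atomic_nand_fetch (p, val, ...)  */
}
#endif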
5405 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5407 void
5408 expand_ifn_atomic_bit_test_and (gcall *call)
5410 tree ptr = gimple_call_arg (call, 0);
5411 tree bit = gimple_call_arg (call, 1);
5412 tree flag = gimple_call_arg (call, 2);
5413 tree lhs = gimple_call_lhs (call);
5414 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5415 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5416 enum rtx_code code;
5417 optab optab;
5418 struct expand_operand ops[5];
5420 gcc_assert (flag_inline_atomics);
5422 if (gimple_call_num_args (call) == 4)
5423 model = get_memmodel (gimple_call_arg (call, 3));
5425 rtx mem = get_builtin_sync_mem (ptr, mode);
5426 rtx val = expand_expr_force_mode (bit, mode);
5428 switch (gimple_call_internal_fn (call))
5430 case IFN_ATOMIC_BIT_TEST_AND_SET:
5431 code = IOR;
5432 optab = atomic_bit_test_and_set_optab;
5433 break;
5434 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5435 code = XOR;
5436 optab = atomic_bit_test_and_complement_optab;
5437 break;
5438 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5439 code = AND;
5440 optab = atomic_bit_test_and_reset_optab;
5441 break;
5442 default:
5443 gcc_unreachable ();
5446 if (lhs == NULL_TREE)
5448 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5449 val, NULL_RTX, true, OPTAB_DIRECT);
5450 if (code == AND)
5451 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5452 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5453 return;
5456 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5457 enum insn_code icode = direct_optab_handler (optab, mode);
5458 gcc_assert (icode != CODE_FOR_nothing);
5459 create_output_operand (&ops[0], target, mode);
5460 create_fixed_operand (&ops[1], mem);
5461 create_convert_operand_to (&ops[2], val, mode, true);
5462 create_integer_operand (&ops[3], model);
5463 create_integer_operand (&ops[4], integer_onep (flag));
5464 if (maybe_expand_insn (icode, 5, ops))
5465 return;
5467 rtx bitval = val;
5468 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5469 val, NULL_RTX, true, OPTAB_DIRECT);
5470 rtx maskval = val;
5471 if (code == AND)
5472 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5473 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5474 code, model, false);
5475 if (integer_onep (flag))
5477 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5478 NULL_RTX, true, OPTAB_DIRECT);
5479 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5480 true, OPTAB_DIRECT);
5482 else
5483 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5484 OPTAB_DIRECT);
5485 if (result != target)
5486 emit_move_insn (target, result);
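/* Illustrative sketch (editorial addition): IFN_ATOMIC_BIT_TEST_AND_* calls
   are not written by users; they are produced during gimple folding from
   patterns like the one below, where the fetched value is only used to
   test the affected bit, so targets such as x86 can emit a single locked
   bit-test instruction.  */
#if 0
static int
set_bit_and_test (unsigned *p, unsigned bit)
{
  unsigned mask = 1u << bit;
  return (__atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}
#endif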
5489 /* Expand an atomic clear operation.
5490 void __atomic_clear (BOOL *obj, enum memmodel)
5491 EXP is the call expression. */
5493 static rtx
5494 expand_builtin_atomic_clear (tree exp)
5496 machine_mode mode;
5497 rtx mem, ret;
5498 enum memmodel model;
5500 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5501 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5502 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5504 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5506 source_location loc
5507 = expansion_point_location_if_in_system_header (input_location);
5508 warning_at (loc, OPT_Winvalid_memory_model,
5509 "invalid memory model for %<__atomic_store%>");
5510 model = MEMMODEL_SEQ_CST;
5513 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release.
5514 Failing both, a plain store is issued below. The only way this can
5515 fail is if the bool type is larger than a word size. Unlikely, but
5516 handle it anyway for completeness. Assume a single threaded model since
5517 there is no atomic support in this case, and no barriers are required. */
5518 ret = expand_atomic_store (mem, const0_rtx, model, true);
5519 if (!ret)
5520 emit_move_insn (mem, const0_rtx);
5521 return const0_rtx;
5524 /* Expand an atomic test_and_set operation.
5525 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5526 EXP is the call expression. */
5528 static rtx
5529 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5531 rtx mem;
5532 enum memmodel model;
5533 machine_mode mode;
5535 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5536 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5537 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5539 return expand_atomic_test_and_set (target, mem, model);
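/* Illustrative sketch (editorial addition): the classic pairing of the two
   expanders above, a byte-sized spinlock.  */
#if 0
static void
spin_lock (volatile _Bool *l)
{
  while (__atomic_test_and_set (l, __ATOMIC_ACQUIRE))
    ;				/* spin */
}

static void
spin_unlock (volatile _Bool *l)
{
  __atomic_clear (l, __ATOMIC_RELEASE);
}
#endif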
5543 /* Return boolean_true_node if the object at (optional) address ARG1, of
5544 size ARG0, is always lock free on this architecture. If ARG1 is NULL, use
5545 typical alignment for size ARG0. Return NULL_TREE if ARG0 is not constant. */
5546 static tree
5547 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5549 int size;
5550 machine_mode mode;
5551 unsigned int mode_align, type_align;
5553 if (TREE_CODE (arg0) != INTEGER_CST)
5554 return NULL_TREE;
5556 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5557 mode = mode_for_size (size, MODE_INT, 0);
5558 mode_align = GET_MODE_ALIGNMENT (mode);
5560 if (TREE_CODE (arg1) == INTEGER_CST)
5562 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5564 /* Either this argument is null, or it's a fake pointer encoding
5565 the alignment of the object. */
5566 val = val & -val;
5567 val *= BITS_PER_UNIT;
5569 if (val == 0 || mode_align < val)
5570 type_align = mode_align;
5571 else
5572 type_align = val;
5574 else
5576 tree ttype = TREE_TYPE (arg1);
5578 /* This function is usually invoked and folded immediately by the front
5579 end before anything else has a chance to look at it. The pointer
5580 parameter at this point is usually cast to a void *, so check for that
5581 and look past the cast. */
5582 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5583 && VOID_TYPE_P (TREE_TYPE (ttype)))
5584 arg1 = TREE_OPERAND (arg1, 0);
5586 ttype = TREE_TYPE (arg1);
5587 gcc_assert (POINTER_TYPE_P (ttype));
5589 /* Get the underlying type of the object. */
5590 ttype = TREE_TYPE (ttype);
5591 type_align = TYPE_ALIGN (ttype);
5594 /* If the object has smaller alignment, the lock free routines cannot
5595 be used. */
5596 if (type_align < mode_align)
5597 return boolean_false_node;
5599 /* Check if a compare_and_swap pattern exists for the mode which represents
5600 the required size. The pattern is not allowed to fail, so the existence
5601 of the pattern indicates support is present. */
5602 if (can_compare_and_swap_p (mode, true))
5603 return boolean_true_node;
5604 else
5605 return boolean_false_node;
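/* Illustrative sketch (editorial addition): the "fake pointer" convention
   handled above.  Front ends pass either a real address or an integer
   constant whose lowest set bit encodes the object's alignment, so on a
   typical target all of these fold at compile time.  */
#if 0
extern int global_i;
int a (void) { return __atomic_always_lock_free (sizeof (int), 0); }
int b (void) { return __atomic_always_lock_free (sizeof (int), &global_i); }
int c (void) { return __atomic_always_lock_free (8, (void *) 8); }
#endif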
5608 /* Return true if the parameters to call EXP represent an object which will
5609 always generate lock free instructions. The first argument represents the
5610 size of the object, and the second parameter is a pointer to the object
5611 itself. If NULL is passed for the object, then the result is based on
5612 typical alignment for an object of the specified size. Return const1_rtx
5613 if always lock free, const0_rtx otherwise. */
5615 static rtx
5616 expand_builtin_atomic_always_lock_free (tree exp)
5618 tree size;
5619 tree arg0 = CALL_EXPR_ARG (exp, 0);
5620 tree arg1 = CALL_EXPR_ARG (exp, 1);
5622 if (TREE_CODE (arg0) != INTEGER_CST)
5624 error ("non-constant argument 1 to __atomic_always_lock_free");
5625 return const0_rtx;
5628 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5629 if (size == boolean_true_node)
5630 return const1_rtx;
5631 return const0_rtx;
5634 /* Return boolean_true_node if it can be determined that the object ARG1,
5635 of size ARG0, is lock free on this architecture; otherwise NULL_TREE. */
5637 static tree
5638 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5640 if (!flag_inline_atomics)
5641 return NULL_TREE;
5643 /* If it isn't always lock free, don't generate a result. */
5644 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5645 return boolean_true_node;
5647 return NULL_TREE;
5650 /* Return true if the parameters to call EXP represent an object which will
5651 always generate lock free instructions. The first argument represents the
5652 size of the object, and the second parameter is a pointer to the object
5653 itself. If NULL is passed for the object, then the result is based on
5654 typical alignment for an object of the specified size. Otherwise return
5655 NULL_RTX. */
5657 static rtx
5658 expand_builtin_atomic_is_lock_free (tree exp)
5660 tree size;
5661 tree arg0 = CALL_EXPR_ARG (exp, 0);
5662 tree arg1 = CALL_EXPR_ARG (exp, 1);
5664 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5666 error ("non-integer argument 1 to __atomic_is_lock_free");
5667 return NULL_RTX;
5670 if (!flag_inline_atomics)
5671 return NULL_RTX;
5673 /* If the value is known at compile time, return the RTX for it. */
5674 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5675 if (size == boolean_true_node)
5676 return const1_rtx;
5678 return NULL_RTX;
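/* Illustrative sketch (editorial addition): unlike
   __atomic_always_lock_free, this builtin may defer to the runtime.  When
   the folder above cannot prove a compile-time "true", NULL_RTX makes the
   caller emit a real call, typically satisfied by libatomic.  */
#if 0
static _Bool
maybe_runtime_check (void *p)
{
  /* Folds to 1 if provably lock free; otherwise becomes a library call
     to __atomic_is_lock_free (16, p).  */
  return __atomic_is_lock_free (16, p);
}
#endif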
5681 /* Expand the __atomic_thread_fence intrinsic:
5682 void __atomic_thread_fence (enum memmodel)
5683 EXP is the CALL_EXPR. */
5685 static void
5686 expand_builtin_atomic_thread_fence (tree exp)
5688 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5689 expand_mem_thread_fence (model);
5692 /* Expand the __atomic_signal_fence intrinsic:
5693 void __atomic_signal_fence (enum memmodel)
5694 EXP is the CALL_EXPR. */
5696 static void
5697 expand_builtin_atomic_signal_fence (tree exp)
5699 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5700 expand_mem_signal_fence (model);
5703 /* Expand the __sync_synchronize intrinsic. */
5705 static void
5706 expand_builtin_sync_synchronize (void)
5708 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
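/* Illustrative sketch (editorial addition): the three fence builtins
   expanded above.  A signal fence only constrains the compiler, since a
   thread and a signal handler running in it share one CPU's view of
   memory.  */
#if 0
static void
fences (void)
{
  __sync_synchronize ();			/* legacy full barrier */
  __atomic_thread_fence (__ATOMIC_RELEASE);	/* inter-thread fence */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);	/* compiler-only fence */
}
#endif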
5711 static rtx
5712 expand_builtin_thread_pointer (tree exp, rtx target)
5714 enum insn_code icode;
5715 if (!validate_arglist (exp, VOID_TYPE))
5716 return const0_rtx;
5717 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5718 if (icode != CODE_FOR_nothing)
5720 struct expand_operand op;
5721 /* If the target is not suitable then create a new target. */
5722 if (target == NULL_RTX
5723 || !REG_P (target)
5724 || GET_MODE (target) != Pmode)
5725 target = gen_reg_rtx (Pmode);
5726 create_output_operand (&op, target, Pmode);
5727 expand_insn (icode, 1, &op);
5728 return target;
5730 error ("__builtin_thread_pointer is not supported on this target");
5731 return const0_rtx;
5734 static void
5735 expand_builtin_set_thread_pointer (tree exp)
5737 enum insn_code icode;
5738 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5739 return;
5740 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5741 if (icode != CODE_FOR_nothing)
5743 struct expand_operand op;
5744 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5745 Pmode, EXPAND_NORMAL);
5746 create_input_operand (&op, val, Pmode);
5747 expand_insn (icode, 1, &op);
5748 return;
5750 error ("__builtin_set_thread_pointer is not supported on this target");
5754 /* Emit code to restore the current value of stack. */
5756 static void
5757 expand_stack_restore (tree var)
5759 rtx_insn *prev;
5760 rtx sa = expand_normal (var);
5762 sa = convert_memory_address (Pmode, sa);
5764 prev = get_last_insn ();
5765 emit_stack_restore (SAVE_BLOCK, sa);
5767 record_new_stack_level ();
5769 fixup_args_size_notes (prev, get_last_insn (), 0);
5772 /* Emit code to save the current value of stack. */
5774 static rtx
5775 expand_stack_save (void)
5777 rtx ret = NULL_RTX;
5779 emit_stack_save (SAVE_BLOCK, &ret);
5780 return ret;
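/* Illustrative sketch (editorial addition): the usual producer of this
   save/restore pair is a scope containing a variable-length array; the
   calls shown in comments below are inserted by the compiler,
   conceptually.  */
#if 0
static int
sum_vla (int n, const int *src)
{
  /* void *sp = __builtin_stack_save ();  -- at scope entry */
  int buf[n], i, s = 0;
  for (i = 0; i < n; i++)
    s += (buf[i] = src[i]);
  return s;
  /* __builtin_stack_restore (sp);  -- at scope exit */
}
#endif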
5784 /* Expand an expression EXP that calls a built-in function,
5785 with result going to TARGET if that's convenient
5786 (and in mode MODE if that's convenient).
5787 SUBTARGET may be used as the target for computing one of EXP's operands.
5788 IGNORE is nonzero if the value is to be ignored. */
5790 rtx
5791 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5792 int ignore)
5794 tree fndecl = get_callee_fndecl (exp);
5795 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5796 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5797 int flags;
5799 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5800 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5802 /* When ASan is enabled, we don't want to expand some memory/string
5803 builtins and rely on libsanitizer's hooks. This allows us to avoid
5804 redundant checks and be sure, that possible overflow will be detected
5805 by ASan. */
5807 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5808 return expand_call (exp, target, ignore);
5810 /* When not optimizing, generate calls to library functions for a certain
5811 set of builtins. */
5812 if (!optimize
5813 && !called_as_built_in (fndecl)
5814 && fcode != BUILT_IN_FORK
5815 && fcode != BUILT_IN_EXECL
5816 && fcode != BUILT_IN_EXECV
5817 && fcode != BUILT_IN_EXECLP
5818 && fcode != BUILT_IN_EXECLE
5819 && fcode != BUILT_IN_EXECVP
5820 && fcode != BUILT_IN_EXECVE
5821 && fcode != BUILT_IN_ALLOCA
5822 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5823 && fcode != BUILT_IN_FREE
5824 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5830 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5831 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5832 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5833 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5834 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5835 && fcode != BUILT_IN_CHKP_BNDRET)
5836 return expand_call (exp, target, ignore);
5838 /* The built-in function expanders test for target == const0_rtx
5839 to determine whether the function's result will be ignored. */
5840 if (ignore)
5841 target = const0_rtx;
5843 /* If the result of a pure or const built-in function is ignored, and
5844 none of its arguments are volatile, we can avoid expanding the
5845 built-in call and just evaluate the arguments for side-effects. */
5846 if (target == const0_rtx
5847 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5848 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5850 bool volatilep = false;
5851 tree arg;
5852 call_expr_arg_iterator iter;
5854 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5855 if (TREE_THIS_VOLATILE (arg))
5857 volatilep = true;
5858 break;
5861 if (! volatilep)
5863 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5864 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5865 return const0_rtx;
5869 /* expand_builtin_with_bounds is supposed to be used for
5870 instrumented builtin calls. */
5871 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5873 switch (fcode)
5875 CASE_FLT_FN (BUILT_IN_FABS):
5876 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5877 case BUILT_IN_FABSD32:
5878 case BUILT_IN_FABSD64:
5879 case BUILT_IN_FABSD128:
5880 target = expand_builtin_fabs (exp, target, subtarget);
5881 if (target)
5882 return target;
5883 break;
5885 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5886 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5887 target = expand_builtin_copysign (exp, target, subtarget);
5888 if (target)
5889 return target;
5890 break;
5892 /* Just do a normal library call if we were unable to fold
5893 the values. */
5894 CASE_FLT_FN (BUILT_IN_CABS):
5895 break;
5897 CASE_FLT_FN (BUILT_IN_FMA):
5898 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5899 if (target)
5900 return target;
5901 break;
5903 CASE_FLT_FN (BUILT_IN_ILOGB):
5904 if (! flag_unsafe_math_optimizations)
5905 break;
5906 CASE_FLT_FN (BUILT_IN_ISINF):
5907 CASE_FLT_FN (BUILT_IN_FINITE):
5908 case BUILT_IN_ISFINITE:
5909 case BUILT_IN_ISNORMAL:
5910 target = expand_builtin_interclass_mathfn (exp, target);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_ICEIL):
5916 CASE_FLT_FN (BUILT_IN_LCEIL):
5917 CASE_FLT_FN (BUILT_IN_LLCEIL):
5918 CASE_FLT_FN (BUILT_IN_LFLOOR):
5919 CASE_FLT_FN (BUILT_IN_IFLOOR):
5920 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5921 target = expand_builtin_int_roundingfn (exp, target);
5922 if (target)
5923 return target;
5924 break;
5926 CASE_FLT_FN (BUILT_IN_IRINT):
5927 CASE_FLT_FN (BUILT_IN_LRINT):
5928 CASE_FLT_FN (BUILT_IN_LLRINT):
5929 CASE_FLT_FN (BUILT_IN_IROUND):
5930 CASE_FLT_FN (BUILT_IN_LROUND):
5931 CASE_FLT_FN (BUILT_IN_LLROUND):
5932 target = expand_builtin_int_roundingfn_2 (exp, target);
5933 if (target)
5934 return target;
5935 break;
5937 CASE_FLT_FN (BUILT_IN_POWI):
5938 target = expand_builtin_powi (exp, target);
5939 if (target)
5940 return target;
5941 break;
5943 CASE_FLT_FN (BUILT_IN_CEXPI):
5944 target = expand_builtin_cexpi (exp, target);
5945 gcc_assert (target);
5946 return target;
5948 CASE_FLT_FN (BUILT_IN_SIN):
5949 CASE_FLT_FN (BUILT_IN_COS):
5950 if (! flag_unsafe_math_optimizations)
5951 break;
5952 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5957 CASE_FLT_FN (BUILT_IN_SINCOS):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 target = expand_builtin_sincos (exp);
5961 if (target)
5962 return target;
5963 break;
5965 case BUILT_IN_APPLY_ARGS:
5966 return expand_builtin_apply_args ();
5968 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5969 FUNCTION with a copy of the parameters described by
5970 ARGUMENTS, and ARGSIZE. It returns a block of memory
5971 allocated on the stack into which is stored all the registers
5972 that might possibly be used for returning the result of a
5973 function. ARGUMENTS is the value returned by
5974 __builtin_apply_args. ARGSIZE is the number of bytes of
5975 arguments that must be copied. ??? How should this value be
5976 computed? We'll also need a safe worst case value for varargs
5977 functions. */
5978 case BUILT_IN_APPLY:
5979 if (!validate_arglist (exp, POINTER_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5981 && !validate_arglist (exp, REFERENCE_TYPE,
5982 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983 return const0_rtx;
5984 else
5986 rtx ops[3];
5988 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5989 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5990 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5992 return expand_builtin_apply (ops[0], ops[1], ops[2]);
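/* Illustrative sketch (editorial addition): the intended use of the
   builtins handled here and in the BUILT_IN_RETURN case below, forwarding
   a call with the caller's incoming arguments.  The argument size 64 is a
   hypothetical worst case.  */
#if 0
extern double target_fn ();

static double
forward_all ()
{
  void *args = __builtin_apply_args ();
  void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (ret);
}
#endif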
5995 /* __builtin_return (RESULT) causes the function to return the
5996 value described by RESULT. RESULT is address of the block of
5997 memory returned by __builtin_apply. */
5998 case BUILT_IN_RETURN:
5999 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6000 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6001 return const0_rtx;
6003 case BUILT_IN_SAVEREGS:
6004 return expand_builtin_saveregs ();
6006 case BUILT_IN_VA_ARG_PACK:
6007 /* All valid uses of __builtin_va_arg_pack () are removed during
6008 inlining. */
6009 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6010 return const0_rtx;
6012 case BUILT_IN_VA_ARG_PACK_LEN:
6013 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6014 inlining. */
6015 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6016 return const0_rtx;
6018 /* Return the address of the first anonymous stack arg. */
6019 case BUILT_IN_NEXT_ARG:
6020 if (fold_builtin_next_arg (exp, false))
6021 return const0_rtx;
6022 return expand_builtin_next_arg ();
6024 case BUILT_IN_CLEAR_CACHE:
6025 target = expand_builtin___clear_cache (exp);
6026 if (target)
6027 return target;
6028 break;
6030 case BUILT_IN_CLASSIFY_TYPE:
6031 return expand_builtin_classify_type (exp);
6033 case BUILT_IN_CONSTANT_P:
6034 return const0_rtx;
6036 case BUILT_IN_FRAME_ADDRESS:
6037 case BUILT_IN_RETURN_ADDRESS:
6038 return expand_builtin_frame_address (fndecl, exp);
6040 /* Return the address of the area where the structure value is
6041 returned, or 0 if there is none. */
6042 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6043 if (call_expr_nargs (exp) != 0
6044 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6045 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6046 return const0_rtx;
6047 else
6048 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6050 case BUILT_IN_ALLOCA:
6051 case BUILT_IN_ALLOCA_WITH_ALIGN:
6052 /* If the allocation stems from the declaration of a variable-sized
6053 object, it cannot accumulate. */
6054 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6055 if (target)
6056 return target;
6057 break;
6059 case BUILT_IN_STACK_SAVE:
6060 return expand_stack_save ();
6062 case BUILT_IN_STACK_RESTORE:
6063 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6064 return const0_rtx;
6066 case BUILT_IN_BSWAP16:
6067 case BUILT_IN_BSWAP32:
6068 case BUILT_IN_BSWAP64:
6069 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6070 if (target)
6071 return target;
6072 break;
6074 CASE_INT_FN (BUILT_IN_FFS):
6075 target = expand_builtin_unop (target_mode, exp, target,
6076 subtarget, ffs_optab);
6077 if (target)
6078 return target;
6079 break;
6081 CASE_INT_FN (BUILT_IN_CLZ):
6082 target = expand_builtin_unop (target_mode, exp, target,
6083 subtarget, clz_optab);
6084 if (target)
6085 return target;
6086 break;
6088 CASE_INT_FN (BUILT_IN_CTZ):
6089 target = expand_builtin_unop (target_mode, exp, target,
6090 subtarget, ctz_optab);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_INT_FN (BUILT_IN_CLRSB):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, clrsb_optab);
6098 if (target)
6099 return target;
6100 break;
6102 CASE_INT_FN (BUILT_IN_POPCOUNT):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, popcount_optab);
6105 if (target)
6106 return target;
6107 break;
6109 CASE_INT_FN (BUILT_IN_PARITY):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, parity_optab);
6112 if (target)
6113 return target;
6114 break;
6116 case BUILT_IN_STRLEN:
6117 target = expand_builtin_strlen (exp, target, target_mode);
6118 if (target)
6119 return target;
6120 break;
6122 case BUILT_IN_STRCPY:
6123 target = expand_builtin_strcpy (exp, target);
6124 if (target)
6125 return target;
6126 break;
6128 case BUILT_IN_STRNCPY:
6129 target = expand_builtin_strncpy (exp, target);
6130 if (target)
6131 return target;
6132 break;
6134 case BUILT_IN_STPCPY:
6135 target = expand_builtin_stpcpy (exp, target, mode);
6136 if (target)
6137 return target;
6138 break;
6140 case BUILT_IN_MEMCPY:
6141 target = expand_builtin_memcpy (exp, target);
6142 if (target)
6143 return target;
6144 break;
6146 case BUILT_IN_MEMPCPY:
6147 target = expand_builtin_mempcpy (exp, target, mode);
6148 if (target)
6149 return target;
6150 break;
6152 case BUILT_IN_MEMSET:
6153 target = expand_builtin_memset (exp, target, mode);
6154 if (target)
6155 return target;
6156 break;
6158 case BUILT_IN_BZERO:
6159 target = expand_builtin_bzero (exp);
6160 if (target)
6161 return target;
6162 break;
6164 case BUILT_IN_STRCMP:
6165 target = expand_builtin_strcmp (exp, target);
6166 if (target)
6167 return target;
6168 break;
6170 case BUILT_IN_STRNCMP:
6171 target = expand_builtin_strncmp (exp, target, mode);
6172 if (target)
6173 return target;
6174 break;
6176 case BUILT_IN_BCMP:
6177 case BUILT_IN_MEMCMP:
6178 case BUILT_IN_MEMCMP_EQ:
6179 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6180 if (target)
6181 return target;
6182 if (fcode == BUILT_IN_MEMCMP_EQ)
6184 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6185 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6187 break;
6189 case BUILT_IN_SETJMP:
6190 /* This should have been lowered to the builtins below. */
6191 gcc_unreachable ();
6193 case BUILT_IN_SETJMP_SETUP:
6194 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6195 and the receiver label. */
6196 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6198 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6199 VOIDmode, EXPAND_NORMAL);
6200 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6201 rtx_insn *label_r = label_rtx (label);
6203 /* This is copied from the handling of non-local gotos. */
6204 expand_builtin_setjmp_setup (buf_addr, label_r);
6205 nonlocal_goto_handler_labels
6206 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6207 nonlocal_goto_handler_labels);
6208 /* ??? Do not let expand_label treat us as such since we would
6209 not want to be both on the list of non-local labels and on
6210 the list of forced labels. */
6211 FORCED_LABEL (label) = 0;
6212 return const0_rtx;
6214 break;
6216 case BUILT_IN_SETJMP_RECEIVER:
6217 /* __builtin_setjmp_receiver is passed the receiver label. */
6218 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6221 rtx_insn *label_r = label_rtx (label);
6223 expand_builtin_setjmp_receiver (label_r);
6224 return const0_rtx;
6226 break;
6228 /* __builtin_longjmp is passed a pointer to an array of five words.
6229 It's similar to the C library longjmp function but works with
6230 __builtin_setjmp above. */
6231 case BUILT_IN_LONGJMP:
6232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6234 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6235 VOIDmode, EXPAND_NORMAL);
6236 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6238 if (value != const1_rtx)
6240 error ("%<__builtin_longjmp%> second argument must be 1");
6241 return const0_rtx;
6244 expand_builtin_longjmp (buf_addr, value);
6245 return const0_rtx;
6247 break;
6249 case BUILT_IN_NONLOCAL_GOTO:
6250 target = expand_builtin_nonlocal_goto (exp);
6251 if (target)
6252 return target;
6253 break;
6255 /* This updates the setjmp buffer that is its argument with the value
6256 of the current stack pointer. */
6257 case BUILT_IN_UPDATE_SETJMP_BUF:
6258 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6260 rtx buf_addr
6261 = expand_normal (CALL_EXPR_ARG (exp, 0));
6263 expand_builtin_update_setjmp_buf (buf_addr);
6264 return const0_rtx;
6266 break;
6268 case BUILT_IN_TRAP:
6269 expand_builtin_trap ();
6270 return const0_rtx;
6272 case BUILT_IN_UNREACHABLE:
6273 expand_builtin_unreachable ();
6274 return const0_rtx;
6276 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6277 case BUILT_IN_SIGNBITD32:
6278 case BUILT_IN_SIGNBITD64:
6279 case BUILT_IN_SIGNBITD128:
6280 target = expand_builtin_signbit (exp, target);
6281 if (target)
6282 return target;
6283 break;
6285 /* Various hooks for the DWARF 2 __throw routine. */
6286 case BUILT_IN_UNWIND_INIT:
6287 expand_builtin_unwind_init ();
6288 return const0_rtx;
6289 case BUILT_IN_DWARF_CFA:
6290 return virtual_cfa_rtx;
6291 #ifdef DWARF2_UNWIND_INFO
6292 case BUILT_IN_DWARF_SP_COLUMN:
6293 return expand_builtin_dwarf_sp_column ();
6294 case BUILT_IN_INIT_DWARF_REG_SIZES:
6295 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6296 return const0_rtx;
6297 #endif
6298 case BUILT_IN_FROB_RETURN_ADDR:
6299 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6300 case BUILT_IN_EXTRACT_RETURN_ADDR:
6301 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6302 case BUILT_IN_EH_RETURN:
6303 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6304 CALL_EXPR_ARG (exp, 1));
6305 return const0_rtx;
6306 case BUILT_IN_EH_RETURN_DATA_REGNO:
6307 return expand_builtin_eh_return_data_regno (exp);
6308 case BUILT_IN_EXTEND_POINTER:
6309 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6310 case BUILT_IN_EH_POINTER:
6311 return expand_builtin_eh_pointer (exp);
6312 case BUILT_IN_EH_FILTER:
6313 return expand_builtin_eh_filter (exp);
6314 case BUILT_IN_EH_COPY_VALUES:
6315 return expand_builtin_eh_copy_values (exp);
6317 case BUILT_IN_VA_START:
6318 return expand_builtin_va_start (exp);
6319 case BUILT_IN_VA_END:
6320 return expand_builtin_va_end (exp);
6321 case BUILT_IN_VA_COPY:
6322 return expand_builtin_va_copy (exp);
6323 case BUILT_IN_EXPECT:
6324 return expand_builtin_expect (exp, target);
6325 case BUILT_IN_ASSUME_ALIGNED:
6326 return expand_builtin_assume_aligned (exp, target);
6327 case BUILT_IN_PREFETCH:
6328 expand_builtin_prefetch (exp);
6329 return const0_rtx;
6331 case BUILT_IN_INIT_TRAMPOLINE:
6332 return expand_builtin_init_trampoline (exp, true);
6333 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6334 return expand_builtin_init_trampoline (exp, false);
6335 case BUILT_IN_ADJUST_TRAMPOLINE:
6336 return expand_builtin_adjust_trampoline (exp);
6338 case BUILT_IN_FORK:
6339 case BUILT_IN_EXECL:
6340 case BUILT_IN_EXECV:
6341 case BUILT_IN_EXECLP:
6342 case BUILT_IN_EXECLE:
6343 case BUILT_IN_EXECVP:
6344 case BUILT_IN_EXECVE:
6345 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6346 if (target)
6347 return target;
6348 break;
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6354 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6355 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6356 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6357 if (target)
6358 return target;
6359 break;
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6365 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6366 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6367 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6368 if (target)
6369 return target;
6370 break;
6372 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6375 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6376 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6378 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6379 if (target)
6380 return target;
6381 break;
6383 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6386 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6387 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6389 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6398 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6399 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6400 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6401 if (target)
6402 return target;
6403 break;
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6409 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6411 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6412 if (target)
6413 return target;
6414 break;
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6420 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6422 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6423 if (target)
6424 return target;
6425 break;
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6431 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6432 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6433 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6434 if (target)
6435 return target;
6436 break;
6438 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6441 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6442 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6443 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6444 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6452 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6453 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6455 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6464 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6466 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6467 if (target)
6468 return target;
6469 break;
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6475 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6477 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6486 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6487 if (mode == VOIDmode)
6488 mode = TYPE_MODE (boolean_type_node);
6489 if (!target || !register_operand (target, mode))
6490 target = gen_reg_rtx (mode);
6492 mode
6493 = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6494 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6503 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6504 mode
6505 = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6506 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6515 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6517 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6525 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6526 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6528 expand_builtin_sync_lock_release (mode, exp);
6529 return const0_rtx;
6531 case BUILT_IN_SYNC_SYNCHRONIZE:
6532 expand_builtin_sync_synchronize ();
6533 return const0_rtx;
6535 case BUILT_IN_ATOMIC_EXCHANGE_1:
6536 case BUILT_IN_ATOMIC_EXCHANGE_2:
6537 case BUILT_IN_ATOMIC_EXCHANGE_4:
6538 case BUILT_IN_ATOMIC_EXCHANGE_8:
6539 case BUILT_IN_ATOMIC_EXCHANGE_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6541 target = expand_builtin_atomic_exchange (mode, exp, target);
6542 if (target)
6543 return target;
6544 break;
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6552 unsigned int nargs, z;
6553 vec<tree, va_gc> *vec;
6555 mode
6556 = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6557 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6558 if (target)
6559 return target;
6561 /* If this is turned into an external library call, the weak parameter
6562 must be dropped to match the expected parameter list. */
6563 nargs = call_expr_nargs (exp);
6564 vec_alloc (vec, nargs - 1);
6565 for (z = 0; z < 3; z++)
6566 vec->quick_push (CALL_EXPR_ARG (exp, z));
6567 /* Skip the boolean weak parameter. */
6568 for (z = 4; z < 6; z++)
6569 vec->quick_push (CALL_EXPR_ARG (exp, z));
6570 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6571 break;
6574 case BUILT_IN_ATOMIC_LOAD_1:
6575 case BUILT_IN_ATOMIC_LOAD_2:
6576 case BUILT_IN_ATOMIC_LOAD_4:
6577 case BUILT_IN_ATOMIC_LOAD_8:
6578 case BUILT_IN_ATOMIC_LOAD_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6580 target = expand_builtin_atomic_load (mode, exp, target);
6581 if (target)
6582 return target;
6583 break;
6585 case BUILT_IN_ATOMIC_STORE_1:
6586 case BUILT_IN_ATOMIC_STORE_2:
6587 case BUILT_IN_ATOMIC_STORE_4:
6588 case BUILT_IN_ATOMIC_STORE_8:
6589 case BUILT_IN_ATOMIC_STORE_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6591 target = expand_builtin_atomic_store (mode, exp);
6592 if (target)
6593 return const0_rtx;
6594 break;
6596 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6599 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6600 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6602 enum built_in_function lib;
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6604 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6605 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6606 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6607 ignore, lib);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6615 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6616 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6618 enum built_in_function lib;
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6621 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6622 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6623 ignore, lib);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_ATOMIC_AND_FETCH_1:
6629 case BUILT_IN_ATOMIC_AND_FETCH_2:
6630 case BUILT_IN_ATOMIC_AND_FETCH_4:
6631 case BUILT_IN_ATOMIC_AND_FETCH_8:
6632 case BUILT_IN_ATOMIC_AND_FETCH_16:
6634 enum built_in_function lib;
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6637 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6638 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6639 ignore, lib);
6640 if (target)
6641 return target;
6642 break;
6644 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6647 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6648 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6650 enum built_in_function lib;
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6653 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6654 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6655 ignore, lib);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6663 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6664 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6666 enum built_in_function lib;
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6669 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6671 ignore, lib);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_ATOMIC_OR_FETCH_1:
6677 case BUILT_IN_ATOMIC_OR_FETCH_2:
6678 case BUILT_IN_ATOMIC_OR_FETCH_4:
6679 case BUILT_IN_ATOMIC_OR_FETCH_8:
6680 case BUILT_IN_ATOMIC_OR_FETCH_16:
6682 enum built_in_function lib;
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6685 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6687 ignore, lib);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6695 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6696 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6698 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6699 ignore, BUILT_IN_NONE);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6707 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6708 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6710 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6711 ignore, BUILT_IN_NONE);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_ATOMIC_FETCH_AND_1:
6717 case BUILT_IN_ATOMIC_FETCH_AND_2:
6718 case BUILT_IN_ATOMIC_FETCH_AND_4:
6719 case BUILT_IN_ATOMIC_FETCH_AND_8:
6720 case BUILT_IN_ATOMIC_FETCH_AND_16:
6721 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6722 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6723 ignore, BUILT_IN_NONE);
6724 if (target)
6725 return target;
6726 break;
6728 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6731 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6732 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6734 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6735 ignore, BUILT_IN_NONE);
6736 if (target)
6737 return target;
6738 break;
6740 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6743 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6744 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6746 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6747 ignore, BUILT_IN_NONE);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_ATOMIC_FETCH_OR_1:
6753 case BUILT_IN_ATOMIC_FETCH_OR_2:
6754 case BUILT_IN_ATOMIC_FETCH_OR_4:
6755 case BUILT_IN_ATOMIC_FETCH_OR_8:
6756 case BUILT_IN_ATOMIC_FETCH_OR_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6758 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6759 ignore, BUILT_IN_NONE);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_ATOMIC_TEST_AND_SET:
6765 return expand_builtin_atomic_test_and_set (exp, target);
6767 case BUILT_IN_ATOMIC_CLEAR:
6768 return expand_builtin_atomic_clear (exp);
6770 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6771 return expand_builtin_atomic_always_lock_free (exp);
6773 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6774 target = expand_builtin_atomic_is_lock_free (exp);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_THREAD_FENCE:
6780 expand_builtin_atomic_thread_fence (exp);
6781 return const0_rtx;
6783 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6784 expand_builtin_atomic_signal_fence (exp);
6785 return const0_rtx;
6787 case BUILT_IN_OBJECT_SIZE:
6788 return expand_builtin_object_size (exp);
6790 case BUILT_IN_MEMCPY_CHK:
6791 case BUILT_IN_MEMPCPY_CHK:
6792 case BUILT_IN_MEMMOVE_CHK:
6793 case BUILT_IN_MEMSET_CHK:
6794 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6795 if (target)
6796 return target;
6797 break;
6799 case BUILT_IN_STRCPY_CHK:
6800 case BUILT_IN_STPCPY_CHK:
6801 case BUILT_IN_STRNCPY_CHK:
6802 case BUILT_IN_STPNCPY_CHK:
6803 case BUILT_IN_STRCAT_CHK:
6804 case BUILT_IN_STRNCAT_CHK:
6805 case BUILT_IN_SNPRINTF_CHK:
6806 case BUILT_IN_VSNPRINTF_CHK:
6807 maybe_emit_chk_warning (exp, fcode);
6808 break;
6810 case BUILT_IN_SPRINTF_CHK:
6811 case BUILT_IN_VSPRINTF_CHK:
6812 maybe_emit_sprintf_chk_warning (exp, fcode);
6813 break;
6815 case BUILT_IN_FREE:
6816 if (warn_free_nonheap_object)
6817 maybe_emit_free_warning (exp);
6818 break;
6820 case BUILT_IN_THREAD_POINTER:
6821 return expand_builtin_thread_pointer (exp, target);
6823 case BUILT_IN_SET_THREAD_POINTER:
6824 expand_builtin_set_thread_pointer (exp);
6825 return const0_rtx;
6827 case BUILT_IN_CILK_DETACH:
6828 expand_builtin_cilk_detach (exp);
6829 return const0_rtx;
6831 case BUILT_IN_CILK_POP_FRAME:
6832 expand_builtin_cilk_pop_frame (exp);
6833 return const0_rtx;
6835 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6839 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6840 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6842 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6843 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6844 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6845 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6846 /* We allow user CHKP builtins if Pointer Bounds
6847 Checker is off. */
6848 if (!chkp_function_instrumented_p (current_function_decl))
6850 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6851 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6852 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6853 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6854 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6855 return expand_normal (CALL_EXPR_ARG (exp, 0));
6856 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6857 return expand_normal (size_zero_node);
6858 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6859 return expand_normal (size_int (-1));
6860 else
6861 return const0_rtx;
6863 /* FALLTHROUGH */
6865 case BUILT_IN_CHKP_BNDMK:
6866 case BUILT_IN_CHKP_BNDSTX:
6867 case BUILT_IN_CHKP_BNDCL:
6868 case BUILT_IN_CHKP_BNDCU:
6869 case BUILT_IN_CHKP_BNDLDX:
6870 case BUILT_IN_CHKP_BNDRET:
6871 case BUILT_IN_CHKP_INTERSECT:
6872 case BUILT_IN_CHKP_NARROW:
6873 case BUILT_IN_CHKP_EXTRACT_LOWER:
6874 case BUILT_IN_CHKP_EXTRACT_UPPER:
6875 /* A software implementation of Pointer Bounds Checker is not yet
6876 implemented; target support is required. */
6877 error ("Your target platform does not support -fcheck-pointer-bounds");
6878 break;
6880 case BUILT_IN_ACC_ON_DEVICE:
6881 /* Do a library call if we failed to expand the builtin when
6882 folding. */
6883 break;
6885 default: /* Just do a library call for any unknown builtin. */
6886 break;
6889 /* The switch statement above can drop through to cause the function
6890 to be called normally. */
6891 return expand_call (exp, target, ignore);
6894 /* Similar to expand_builtin but is used for instrumented calls. */
6896 rtx
6897 expand_builtin_with_bounds (tree exp, rtx target,
6898 rtx subtarget ATTRIBUTE_UNUSED,
6899 machine_mode mode, int ignore)
6901 tree fndecl = get_callee_fndecl (exp);
6902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6904 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6906 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6907 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6909 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6910 && fcode < END_CHKP_BUILTINS);
6912 switch (fcode)
6914 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6915 target = expand_builtin_memcpy_with_bounds (exp, target);
6916 if (target)
6917 return target;
6918 break;
6920 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6921 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6922 if (target)
6923 return target;
6924 break;
6926 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6927 target = expand_builtin_memset_with_bounds (exp, target, mode);
6928 if (target)
6929 return target;
6930 break;
6932 default:
6933 break;
6936 /* The switch statement above can drop through to cause the function
6937 to be called normally. */
6938 return expand_call (exp, target, ignore);
6941 /* Determine whether a tree node represents a call to a built-in
6942 function. If the tree T is a call to a built-in function with
6943 the right number of arguments of the appropriate types, return
6944 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6945 Otherwise the return value is END_BUILTINS. */
6947 enum built_in_function
6948 builtin_mathfn_code (const_tree t)
6950 const_tree fndecl, arg, parmlist;
6951 const_tree argtype, parmtype;
6952 const_call_expr_arg_iterator iter;
6954 if (TREE_CODE (t) != CALL_EXPR
6955 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6956 return END_BUILTINS;
6958 fndecl = get_callee_fndecl (t);
6959 if (fndecl == NULL_TREE
6960 || TREE_CODE (fndecl) != FUNCTION_DECL
6961 || ! DECL_BUILT_IN (fndecl)
6962 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6963 return END_BUILTINS;
6965 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6966 init_const_call_expr_arg_iterator (t, &iter);
6967 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6969 /* If a function doesn't take a variable number of arguments,
6970 the last element in the list will have type `void'. */
6971 parmtype = TREE_VALUE (parmlist);
6972 if (VOID_TYPE_P (parmtype))
6974 if (more_const_call_expr_args_p (&iter))
6975 return END_BUILTINS;
6976 return DECL_FUNCTION_CODE (fndecl);
6979 if (! more_const_call_expr_args_p (&iter))
6980 return END_BUILTINS;
6982 arg = next_const_call_expr_arg (&iter);
6983 argtype = TREE_TYPE (arg);
6985 if (SCALAR_FLOAT_TYPE_P (parmtype))
6987 if (! SCALAR_FLOAT_TYPE_P (argtype))
6988 return END_BUILTINS;
6990 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6992 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6993 return END_BUILTINS;
6995 else if (POINTER_TYPE_P (parmtype))
6997 if (! POINTER_TYPE_P (argtype))
6998 return END_BUILTINS;
7000 else if (INTEGRAL_TYPE_P (parmtype))
7002 if (! INTEGRAL_TYPE_P (argtype))
7003 return END_BUILTINS;
7005 else
7006 return END_BUILTINS;
7009 /* Variable-length argument list. */
7010 return DECL_FUNCTION_CODE (fndecl);
7013 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7014 evaluate to a constant. */
7016 static tree
7017 fold_builtin_constant_p (tree arg)
7019 /* We return 1 for a numeric type that's known to be a constant
7020 value at compile-time or for an aggregate type that's a
7021 literal constant. */
7022 STRIP_NOPS (arg);
7024 /* If we know this is a constant, emit the constant of one. */
7025 if (CONSTANT_CLASS_P (arg)
7026 || (TREE_CODE (arg) == CONSTRUCTOR
7027 && TREE_CONSTANT (arg)))
7028 return integer_one_node;
7029 if (TREE_CODE (arg) == ADDR_EXPR)
7031 tree op = TREE_OPERAND (arg, 0);
7032 if (TREE_CODE (op) == STRING_CST
7033 || (TREE_CODE (op) == ARRAY_REF
7034 && integer_zerop (TREE_OPERAND (op, 1))
7035 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7036 return integer_one_node;
7039 /* If this expression has side effects, show we don't know it to be a
7040 constant. Likewise if it's a pointer or aggregate type since in
7041 those cases we only want literals, since those are only optimized
7042 when generating RTL, not later.
7043 And finally, if we are compiling an initializer, not code, we
7044 need to return a definite result now; there's not going to be any
7045 more optimization done. */
7046 if (TREE_SIDE_EFFECTS (arg)
7047 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7048 || POINTER_TYPE_P (TREE_TYPE (arg))
7049 || cfun == 0
7050 || folding_initializer
7051 || force_folding_builtin_constant_p)
7052 return integer_zero_node;
7054 return NULL_TREE;
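/* Illustrative sketch (editorial addition): what the folder above does and
   does not promise.  */
#if 0
extern int opaque;

static void
constant_p_examples (void)
{
  int a = __builtin_constant_p (42);		/* folds to 1 here */
  int b = __builtin_constant_p ("str");		/* folds to 1 here */
  int c = __builtin_constant_p (opaque);	/* 0, unless later
						   optimization proves it
						   constant */
  (void) a; (void) b; (void) c;
}
#endif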
7057 /* Create a call to builtin_expect with PRED, EXPECTED and (optionally)
7058 PREDICTOR as its arguments and return it as a truthvalue. */
7060 static tree
7061 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7062 tree predictor)
7064 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7066 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7067 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7068 ret_type = TREE_TYPE (TREE_TYPE (fn));
7069 pred_type = TREE_VALUE (arg_types);
7070 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7072 pred = fold_convert_loc (loc, pred_type, pred);
7073 expected = fold_convert_loc (loc, expected_type, expected);
7074 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7075 predictor);
7077 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7078 build_int_cst (ret_type, 0));
7081 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7082 NULL_TREE if no simplification is possible. */
7084 tree
7085 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7087 tree inner, fndecl, inner_arg0;
7088 enum tree_code code;
7090 /* Distribute the expected value over short-circuiting operators.
7091 See through the cast from truthvalue_type_node to long. */
7092 inner_arg0 = arg0;
7093 while (CONVERT_EXPR_P (inner_arg0)
7094 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7095 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7096 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7098 /* If this is a builtin_expect within a builtin_expect, keep the
7099 inner one. See through a comparison against a constant; it
7100 might have been added to create a truthvalue. */
7101 inner = inner_arg0;
7103 if (COMPARISON_CLASS_P (inner)
7104 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7105 inner = TREE_OPERAND (inner, 0);
7107 if (TREE_CODE (inner) == CALL_EXPR
7108 && (fndecl = get_callee_fndecl (inner))
7109 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7110 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7111 return arg0;
7113 inner = inner_arg0;
7114 code = TREE_CODE (inner);
7115 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7117 tree op0 = TREE_OPERAND (inner, 0);
7118 tree op1 = TREE_OPERAND (inner, 1);
7120 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7121 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7122 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7124 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7127 /* If the argument isn't invariant then there's nothing else we can do. */
7128 if (!TREE_CONSTANT (inner_arg0))
7129 return NULL_TREE;
7131 /* If we expect that a comparison against the argument will fold to
7132 a constant return the constant. In practice, this means a true
7133 constant or the address of a non-weak symbol. */
7134 inner = inner_arg0;
7135 STRIP_NOPS (inner);
7136 if (TREE_CODE (inner) == ADDR_EXPR)
7138 do
7140 inner = TREE_OPERAND (inner, 0);
7142 while (TREE_CODE (inner) == COMPONENT_REF
7143 || TREE_CODE (inner) == ARRAY_REF);
7144 if ((TREE_CODE (inner) == VAR_DECL
7145 || TREE_CODE (inner) == FUNCTION_DECL)
7146 && DECL_WEAK (inner))
7147 return NULL_TREE;
7150 /* Otherwise, ARG0 already has the proper type for the return value. */
7151 return arg0;
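/* Illustrative sketch of the distribution above (not from the original
   sources; A and B are hypothetical truthvalues):
     __builtin_expect (a && b, 1)
   is rewritten as roughly
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)
   so the hint reaches each short-circuited operand separately.  */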
7154 /* Fold a call to __builtin_classify_type with argument ARG. */
7156 static tree
7157 fold_builtin_classify_type (tree arg)
7159 if (arg == 0)
7160 return build_int_cst (integer_type_node, no_type_class);
7162 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7165 /* Fold a call to __builtin_strlen with argument ARG. */
7167 static tree
7168 fold_builtin_strlen (location_t loc, tree type, tree arg)
7170 if (!validate_arg (arg, POINTER_TYPE))
7171 return NULL_TREE;
7172 else
7174 tree len = c_strlen (arg, 0);
7176 if (len)
7177 return fold_convert_loc (loc, type, len);
7179 return NULL_TREE;
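/* Illustrative: c_strlen computes constant lengths, so e.g.
   __builtin_strlen ("hello") folds to the constant 5 converted to
   TYPE; a non-literal argument falls through as NULL_TREE.  */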
7183 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7185 static tree
7186 fold_builtin_inf (location_t loc, tree type, int warn)
7188 REAL_VALUE_TYPE real;
7190 /* __builtin_inff is intended to be usable to define INFINITY on all
7191 targets. If an infinity is not available, INFINITY expands "to a
7192 positive constant of type float that overflows at translation
7193 time", footnote "In this case, using INFINITY will violate the
7194 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7195 Thus we pedwarn to ensure this constraint violation is
7196 diagnosed. */
7197 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7198 pedwarn (loc, 0, "target format does not support infinity");
7200 real_inf (&real);
7201 return build_real (type, real);
7204 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7205 NULL_TREE if no simplification can be made. */
7207 static tree
7208 fold_builtin_sincos (location_t loc,
7209 tree arg0, tree arg1, tree arg2)
7211 tree type;
7212 tree fndecl, call = NULL_TREE;
7214 if (!validate_arg (arg0, REAL_TYPE)
7215 || !validate_arg (arg1, POINTER_TYPE)
7216 || !validate_arg (arg2, POINTER_TYPE))
7217 return NULL_TREE;
7219 type = TREE_TYPE (arg0);
7221 /* Look up the cexpi variant for TYPE; sincos is lowered via cexpi. */
7222 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7223 if (fn == END_BUILTINS)
7224 return NULL_TREE;
7226 /* Calculate the result when the argument is a constant. */
7227 if (TREE_CODE (arg0) == REAL_CST)
7229 tree complex_type = build_complex_type (type);
7230 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7232 if (!call)
7234 if (!targetm.libc_has_function (function_c99_math_complex)
7235 || !builtin_decl_implicit_p (fn))
7236 return NULL_TREE;
7237 fndecl = builtin_decl_explicit (fn);
7238 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7239 call = builtin_save_expr (call);
7242 return build2 (COMPOUND_EXPR, void_type_node,
7243 build2 (MODIFY_EXPR, void_type_node,
7244 build_fold_indirect_ref_loc (loc, arg1),
7245 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7246 build2 (MODIFY_EXPR, void_type_node,
7247 build_fold_indirect_ref_loc (loc, arg2),
7248 fold_build1_loc (loc, REALPART_EXPR, type, call)));
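/* Illustrative shape of the result above (not from the original
   sources): for a non-constant X, sincos (x, &s, &c) becomes roughly
     tmp = cexpi (x); *(&s) = __imag tmp; *(&c) = __real tmp;
   reusing the saved cexpi call for both stores, since
   cexpi (x) == cos (x) + i*sin (x).  */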
7251 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7252 arguments to the call, and TYPE is its return type.
7253 Return NULL_TREE if no simplification can be made. */
7255 static tree
7256 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7258 if (!validate_arg (arg1, POINTER_TYPE)
7259 || !validate_arg (arg2, INTEGER_TYPE)
7260 || !validate_arg (len, INTEGER_TYPE))
7261 return NULL_TREE;
7262 else
7264 const char *p1;
7266 if (TREE_CODE (arg2) != INTEGER_CST
7267 || !tree_fits_uhwi_p (len))
7268 return NULL_TREE;
7270 p1 = c_getstr (arg1);
7271 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7273 char c;
7274 const char *r;
7275 tree tem;
7277 if (target_char_cast (arg2, &c))
7278 return NULL_TREE;
7280 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7282 if (r == NULL)
7283 return build_int_cst (TREE_TYPE (arg1), 0);
7285 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7286 return fold_convert_loc (loc, type, tem);
7288 return NULL_TREE;
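/* Illustrative: with a literal first argument, e.g.
     __builtin_memchr ("hello", 'l', 6)
   the host memchr runs on the literal and the call folds to the
   constant offset "hello" + 2; a miss folds to a null pointer of
   ARG1's type.  */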
7292 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7293 Return NULL_TREE if no simplification can be made. */
7295 static tree
7296 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7298 if (!validate_arg (arg1, POINTER_TYPE)
7299 || !validate_arg (arg2, POINTER_TYPE)
7300 || !validate_arg (len, INTEGER_TYPE))
7301 return NULL_TREE;
7303 /* If the LEN parameter is zero, return zero. */
7304 if (integer_zerop (len))
7305 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7306 arg1, arg2);
7308 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7309 if (operand_equal_p (arg1, arg2, 0))
7310 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7312 /* If the LEN parameter is one, return an expression corresponding to
7313 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7314 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7316 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7317 tree cst_uchar_ptr_node
7318 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7320 tree ind1
7321 = fold_convert_loc (loc, integer_type_node,
7322 build1 (INDIRECT_REF, cst_uchar_node,
7323 fold_convert_loc (loc,
7324 cst_uchar_ptr_node,
7325 arg1)));
7326 tree ind2
7327 = fold_convert_loc (loc, integer_type_node,
7328 build1 (INDIRECT_REF, cst_uchar_node,
7329 fold_convert_loc (loc,
7330 cst_uchar_ptr_node,
7331 arg2)));
7332 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7335 return NULL_TREE;
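/* Illustrative instance of the LEN == 1 case above, for hypothetical
   pointers P and Q: __builtin_memcmp (p, q, 1) folds to the
   libcall-free expression
     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q  */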
7338 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7339 Return NULL_TREE if no simplification can be made. */
7341 static tree
7342 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7344 if (!validate_arg (arg1, POINTER_TYPE)
7345 || !validate_arg (arg2, POINTER_TYPE))
7346 return NULL_TREE;
7348 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7349 if (operand_equal_p (arg1, arg2, 0))
7350 return integer_zero_node;
7352 /* If the second arg is "", return *(const unsigned char*)arg1. */
7353 const char *p2 = c_getstr (arg2);
7354 if (p2 && *p2 == '\0')
7356 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7357 tree cst_uchar_ptr_node
7358 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7360 return fold_convert_loc (loc, integer_type_node,
7361 build1 (INDIRECT_REF, cst_uchar_node,
7362 fold_convert_loc (loc,
7363 cst_uchar_ptr_node,
7364 arg1)));
7367 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7368 const char *p1 = c_getstr (arg1);
7369 if (p1 && *p1 == '\0')
7371 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7372 tree cst_uchar_ptr_node
7373 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7375 tree temp
7376 = fold_convert_loc (loc, integer_type_node,
7377 build1 (INDIRECT_REF, cst_uchar_node,
7378 fold_convert_loc (loc,
7379 cst_uchar_ptr_node,
7380 arg2)));
7381 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7384 return NULL_TREE;
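/* Illustrative: for a hypothetical pointer S,
     __builtin_strcmp (s, "")  ->  (int) *(const unsigned char *) s
     __builtin_strcmp ("", s)  ->  -(int) *(const unsigned char *) s
   which matches the sign contract of the C library strcmp.  */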
7387 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7388 Return NULL_TREE if no simplification can be made. */
7390 static tree
7391 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7393 if (!validate_arg (arg1, POINTER_TYPE)
7394 || !validate_arg (arg2, POINTER_TYPE)
7395 || !validate_arg (len, INTEGER_TYPE))
7396 return NULL_TREE;
7398 /* If the LEN parameter is zero, return zero. */
7399 if (integer_zerop (len))
7400 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7401 arg1, arg2);
7403 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7404 if (operand_equal_p (arg1, arg2, 0))
7405 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7407 /* If the second arg is "", and the length is greater than zero,
7408 return *(const unsigned char*)arg1. */
7409 const char *p2 = c_getstr (arg2);
7410 if (p2 && *p2 == '\0'
7411 && TREE_CODE (len) == INTEGER_CST
7412 && tree_int_cst_sgn (len) == 1)
7414 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7415 tree cst_uchar_ptr_node
7416 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7418 return fold_convert_loc (loc, integer_type_node,
7419 build1 (INDIRECT_REF, cst_uchar_node,
7420 fold_convert_loc (loc,
7421 cst_uchar_ptr_node,
7422 arg1)));
7425 /* If the first arg is "", and the length is greater than zero,
7426 return -*(const unsigned char*)arg2. */
7427 const char *p1 = c_getstr (arg1);
7428 if (p1 && *p1 == '\0'
7429 && TREE_CODE (len) == INTEGER_CST
7430 && tree_int_cst_sgn (len) == 1)
7432 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7433 tree cst_uchar_ptr_node
7434 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7436 tree temp = fold_convert_loc (loc, integer_type_node,
7437 build1 (INDIRECT_REF, cst_uchar_node,
7438 fold_convert_loc (loc,
7439 cst_uchar_ptr_node,
7440 arg2)));
7441 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7444 /* If the LEN parameter is one, return an expression corresponding to
7445 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7446 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7448 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7449 tree cst_uchar_ptr_node
7450 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7452 tree ind1 = fold_convert_loc (loc, integer_type_node,
7453 build1 (INDIRECT_REF, cst_uchar_node,
7454 fold_convert_loc (loc,
7455 cst_uchar_ptr_node,
7456 arg1)));
7457 tree ind2 = fold_convert_loc (loc, integer_type_node,
7458 build1 (INDIRECT_REF, cst_uchar_node,
7459 fold_convert_loc (loc,
7460 cst_uchar_ptr_node,
7461 arg2)));
7462 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7465 return NULL_TREE;
7468 /* Fold a call to builtin isascii with argument ARG. */
7470 static tree
7471 fold_builtin_isascii (location_t loc, tree arg)
7473 if (!validate_arg (arg, INTEGER_TYPE))
7474 return NULL_TREE;
7475 else
7477 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7478 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7479 build_int_cst (integer_type_node,
7480 ~ (unsigned HOST_WIDE_INT) 0x7f));
7481 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7482 arg, integer_zero_node);
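/* Illustrative: after the fold above, isascii (c) is the branch-free
   test ((c & ~0x7f) == 0), which is 1 exactly for 0 <= c <= 127.  */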
7486 /* Fold a call to builtin toascii with argument ARG. */
7488 static tree
7489 fold_builtin_toascii (location_t loc, tree arg)
7491 if (!validate_arg (arg, INTEGER_TYPE))
7492 return NULL_TREE;
7494 /* Transform toascii(c) -> (c & 0x7f). */
7495 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7496 build_int_cst (integer_type_node, 0x7f));
7499 /* Fold a call to builtin isdigit with argument ARG. */
7501 static tree
7502 fold_builtin_isdigit (location_t loc, tree arg)
7504 if (!validate_arg (arg, INTEGER_TYPE))
7505 return NULL_TREE;
7506 else
7508 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7509 /* According to the C standard, isdigit is unaffected by locale.
7510 However, it definitely is affected by the target character set. */
7511 unsigned HOST_WIDE_INT target_digit0
7512 = lang_hooks.to_target_charset ('0');
7514 if (target_digit0 == 0)
7515 return NULL_TREE;
7517 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7518 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7519 build_int_cst (unsigned_type_node, target_digit0));
7520 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7521 build_int_cst (unsigned_type_node, 9));
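/* Worked instance, assuming an ASCII execution charset ('0' == 48):
   isdigit (c) folds to (unsigned) c - 48 <= 9, a single unsigned
   comparison that holds exactly for c in 48 .. 57.  */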
7525 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7527 static tree
7528 fold_builtin_fabs (location_t loc, tree arg, tree type)
7530 if (!validate_arg (arg, REAL_TYPE))
7531 return NULL_TREE;
7533 arg = fold_convert_loc (loc, type, arg);
7534 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7537 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7539 static tree
7540 fold_builtin_abs (location_t loc, tree arg, tree type)
7542 if (!validate_arg (arg, INTEGER_TYPE))
7543 return NULL_TREE;
7545 arg = fold_convert_loc (loc, type, arg);
7546 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7549 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7551 static tree
7552 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7554 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7555 if (validate_arg (arg0, REAL_TYPE)
7556 && validate_arg (arg1, REAL_TYPE)
7557 && validate_arg (arg2, REAL_TYPE)
7558 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7559 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7561 return NULL_TREE;
7564 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7566 static tree
7567 fold_builtin_carg (location_t loc, tree arg, tree type)
7569 if (validate_arg (arg, COMPLEX_TYPE)
7570 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7572 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7574 if (atan2_fn)
7576 tree new_arg = builtin_save_expr (arg);
7577 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7578 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7579 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7583 return NULL_TREE;
7586 /* Fold a call to builtin frexp. We can assume the base is 2. */
7588 static tree
7589 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7591 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7592 return NULL_TREE;
7594 STRIP_NOPS (arg0);
7596 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7597 return NULL_TREE;
7599 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7601 /* Proceed if a valid pointer type was passed in. */
7602 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7604 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7605 tree frac, exp;
7607 switch (value->cl)
7609 case rvc_zero:
7610 /* For +-0, return (*exp = 0, +-0). */
7611 exp = integer_zero_node;
7612 frac = arg0;
7613 break;
7614 case rvc_nan:
7615 case rvc_inf:
7616 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7617 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7618 case rvc_normal:
7620 /* Since the frexp function always expects base 2, and in
7621 GCC normalized significands are already in the range
7622 [0.5, 1.0), we have exactly what frexp wants. */
7623 REAL_VALUE_TYPE frac_rvt = *value;
7624 SET_REAL_EXP (&frac_rvt, 0);
7625 frac = build_real (rettype, frac_rvt);
7626 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7628 break;
7629 default:
7630 gcc_unreachable ();
7633 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7634 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7635 TREE_SIDE_EFFECTS (arg1) = 1;
7636 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7639 return NULL_TREE;
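/* Worked example of the rvc_normal case: 8.0 is 0.5 * 2**4 in GCC's
   normalization, so __builtin_frexp (8.0, &e) folds to the
   COMPOUND_EXPR (e = 4, 0.5), for a hypothetical int E.  */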
7642 /* Fold a call to builtin modf. */
7644 static tree
7645 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7647 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7648 return NULL_TREE;
7650 STRIP_NOPS (arg0);
7652 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7653 return NULL_TREE;
7655 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7657 /* Proceed if a valid pointer type was passed in. */
7658 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7660 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7661 REAL_VALUE_TYPE trunc, frac;
7663 switch (value->cl)
7665 case rvc_nan:
7666 case rvc_zero:
7667 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7668 trunc = frac = *value;
7669 break;
7670 case rvc_inf:
7671 /* For +-Inf, return (*arg1 = arg0, +-0). */
7672 frac = dconst0;
7673 frac.sign = value->sign;
7674 trunc = *value;
7675 break;
7676 case rvc_normal:
7677 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7678 real_trunc (&trunc, VOIDmode, value);
7679 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7680 /* If the original number was negative and already
7681 integral, then the fractional part is -0.0. */
7682 if (value->sign && frac.cl == rvc_zero)
7683 frac.sign = value->sign;
7684 break;
7687 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7688 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7689 build_real (rettype, trunc));
7690 TREE_SIDE_EFFECTS (arg1) = 1;
7691 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7692 build_real (rettype, frac));
7695 return NULL_TREE;
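/* Worked example: __builtin_modf (2.5, &i) folds to (i = 2.0, 0.5),
   and for a negative integral input such as -2.0 the fractional part
   keeps the sign, yielding (i = -2.0, -0.0).  */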
7698 /* Given a location LOC, an interclass builtin function decl FNDECL
7699 and its single argument ARG, return a folded expression computing
7700 the same, or NULL_TREE if we either couldn't or didn't want to fold
7701 (the latter happens if there's an RTL instruction available). */
7703 static tree
7704 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7706 machine_mode mode;
7708 if (!validate_arg (arg, REAL_TYPE))
7709 return NULL_TREE;
7711 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7712 return NULL_TREE;
7714 mode = TYPE_MODE (TREE_TYPE (arg));
7716 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7718 /* If there is no optab, try generic code. */
7719 switch (DECL_FUNCTION_CODE (fndecl))
7721 tree result;
7723 CASE_FLT_FN (BUILT_IN_ISINF):
7725 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7726 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7727 tree type = TREE_TYPE (arg);
7728 REAL_VALUE_TYPE r;
7729 char buf[128];
7731 if (is_ibm_extended)
7733 /* NaN and Inf are encoded in the high-order double value
7734 only. The low-order value is not significant. */
7735 type = double_type_node;
7736 mode = DFmode;
7737 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7739 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7740 real_from_string (&r, buf);
7741 result = build_call_expr (isgr_fn, 2,
7742 fold_build1_loc (loc, ABS_EXPR, type, arg),
7743 build_real (type, r));
7744 return result;
7746 CASE_FLT_FN (BUILT_IN_FINITE):
7747 case BUILT_IN_ISFINITE:
7749 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7750 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7751 tree type = TREE_TYPE (arg);
7752 REAL_VALUE_TYPE r;
7753 char buf[128];
7755 if (is_ibm_extended)
7757 /* NaN and Inf are encoded in the high-order double value
7758 only. The low-order value is not significant. */
7759 type = double_type_node;
7760 mode = DFmode;
7761 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7763 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7764 real_from_string (&r, buf);
7765 result = build_call_expr (isle_fn, 2,
7766 fold_build1_loc (loc, ABS_EXPR, type, arg),
7767 build_real (type, r));
7768 /*result = fold_build2_loc (loc, UNGT_EXPR,
7769 TREE_TYPE (TREE_TYPE (fndecl)),
7770 fold_build1_loc (loc, ABS_EXPR, type, arg),
7771 build_real (type, r));
7772 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7773 TREE_TYPE (TREE_TYPE (fndecl)),
7774 result);*/
7775 return result;
7777 case BUILT_IN_ISNORMAL:
7779 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7780 islessequal(fabs(x),DBL_MAX). */
7781 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7782 tree type = TREE_TYPE (arg);
7783 tree orig_arg, max_exp, min_exp;
7784 machine_mode orig_mode = mode;
7785 REAL_VALUE_TYPE rmax, rmin;
7786 char buf[128];
7788 orig_arg = arg = builtin_save_expr (arg);
7789 if (is_ibm_extended)
7791 /* Use double to test the normal range of IBM extended
7792 precision. Emin for IBM extended precision is
7793 different from emin for IEEE double, being 53 higher
7794 since the low double exponent is at least 53 lower
7795 than the high double exponent. */
7796 type = double_type_node;
7797 mode = DFmode;
7798 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7800 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7802 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7803 real_from_string (&rmax, buf);
7804 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7805 real_from_string (&rmin, buf);
7806 max_exp = build_real (type, rmax);
7807 min_exp = build_real (type, rmin);
7809 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7810 if (is_ibm_extended)
7812 /* Testing the high end of the range is done just using
7813 the high double, using the same test as isfinite().
7814 For the subnormal end of the range we first test the
7815 high double, then if its magnitude is equal to the
7816 limit of 0x1p-969, we test whether the low double is
7817 non-zero and opposite sign to the high double. */
7818 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7819 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7820 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7821 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7822 arg, min_exp);
7823 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7824 complex_double_type_node, orig_arg);
7825 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7826 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7827 tree zero = build_real (type, dconst0);
7828 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7829 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7830 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7831 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7832 fold_build3 (COND_EXPR,
7833 integer_type_node,
7834 hilt, logt, lolt));
7835 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7836 eq_min, ok_lo);
7837 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7838 gt_min, eq_min);
7840 else
7842 tree const isge_fn
7843 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7844 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7846 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7847 max_exp, min_exp);
7848 return result;
7850 default:
7851 break;
7854 return NULL_TREE;
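/* Illustrative expansions from the generic path above, assuming an
   IEEE double argument (added commentary, not from the original
   sources):
     isinf (x)    -> isgreater (fabs (x), DBL_MAX)
     isfinite (x) -> islessequal (fabs (x), DBL_MAX)
     isnormal (x) -> islessequal (fabs (x), DBL_MAX)
                     & isgreaterequal (fabs (x), 0x1p-1022)
   where 0x1p-1022 is the smallest normal double (DBL_MIN).  */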
7857 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7858 ARG is the argument for the call. */
7860 static tree
7861 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7863 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7865 if (!validate_arg (arg, REAL_TYPE))
7866 return NULL_TREE;
7868 switch (builtin_index)
7870 case BUILT_IN_ISINF:
7871 if (!HONOR_INFINITIES (arg))
7872 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7874 return NULL_TREE;
7876 case BUILT_IN_ISINF_SIGN:
7878 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7879 /* In a boolean context, GCC will fold the inner COND_EXPR to
7880 1. So e.g. "if (isinf_sign(x))" would be folded to just
7881 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7882 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7883 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7884 tree tmp = NULL_TREE;
7886 arg = builtin_save_expr (arg);
7888 if (signbit_fn && isinf_fn)
7890 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7891 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7893 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7894 signbit_call, integer_zero_node);
7895 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7896 isinf_call, integer_zero_node);
7898 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7899 integer_minus_one_node, integer_one_node);
7900 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7901 isinf_call, tmp,
7902 integer_zero_node);
7905 return tmp;
7908 case BUILT_IN_ISFINITE:
7909 if (!HONOR_NANS (arg)
7910 && !HONOR_INFINITIES (arg))
7911 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7913 return NULL_TREE;
7915 case BUILT_IN_ISNAN:
7916 if (!HONOR_NANS (arg))
7917 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7920 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7921 if (is_ibm_extended)
7923 /* NaN and Inf are encoded in the high-order double value
7924 only. The low-order value is not significant. */
7925 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7928 arg = builtin_save_expr (arg);
7929 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7931 default:
7932 gcc_unreachable ();
7936 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7937 This builtin will generate code to return the appropriate floating
7938 point classification depending on the value of the floating point
7939 number passed in. The possible return values must be supplied as
7940 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7941 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
7942 one floating point argument, which is "type generic". */
7944 static tree
7945 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7947 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7948 arg, type, res, tmp;
7949 machine_mode mode;
7950 REAL_VALUE_TYPE r;
7951 char buf[128];
7953 /* Verify the required arguments in the original call. */
7954 if (nargs != 6
7955 || !validate_arg (args[0], INTEGER_TYPE)
7956 || !validate_arg (args[1], INTEGER_TYPE)
7957 || !validate_arg (args[2], INTEGER_TYPE)
7958 || !validate_arg (args[3], INTEGER_TYPE)
7959 || !validate_arg (args[4], INTEGER_TYPE)
7960 || !validate_arg (args[5], REAL_TYPE))
7961 return NULL_TREE;
7963 fp_nan = args[0];
7964 fp_infinite = args[1];
7965 fp_normal = args[2];
7966 fp_subnormal = args[3];
7967 fp_zero = args[4];
7968 arg = args[5];
7969 type = TREE_TYPE (arg);
7970 mode = TYPE_MODE (type);
7971 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7973 /* fpclassify(x) ->
7974 isnan(x) ? FP_NAN :
7975 (fabs(x) == Inf ? FP_INFINITE :
7976 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7977 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7979 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7980 build_real (type, dconst0));
7981 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7982 tmp, fp_zero, fp_subnormal);
7984 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7985 real_from_string (&r, buf);
7986 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7987 arg, build_real (type, r));
7988 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7990 if (HONOR_INFINITIES (mode))
7992 real_inf (&r);
7993 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7994 build_real (type, r));
7995 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7996 fp_infinite, res);
7999 if (HONOR_NANS (mode))
8001 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8005 return res;
8008 /* Fold a call to an unordered comparison function such as
8009 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8010 being called and ARG0 and ARG1 are the arguments for the call.
8011 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8012 the opposite of the desired result. UNORDERED_CODE is used
8013 for modes that can hold NaNs and ORDERED_CODE is used for
8014 the rest. */
8016 static tree
8017 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8018 enum tree_code unordered_code,
8019 enum tree_code ordered_code)
8021 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8022 enum tree_code code;
8023 tree type0, type1;
8024 enum tree_code code0, code1;
8025 tree cmp_type = NULL_TREE;
8027 type0 = TREE_TYPE (arg0);
8028 type1 = TREE_TYPE (arg1);
8030 code0 = TREE_CODE (type0);
8031 code1 = TREE_CODE (type1);
8033 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8034 /* Choose the wider of two real types. */
8035 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8036 ? type0 : type1;
8037 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8038 cmp_type = type0;
8039 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8040 cmp_type = type1;
8042 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8043 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8045 if (unordered_code == UNORDERED_EXPR)
8047 if (!HONOR_NANS (arg0))
8048 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8049 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8052 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8053 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8054 fold_build2_loc (loc, code, type, arg0, arg1));
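/* Illustrative: when NaNs are honored, isgreater (x, y) folds to
   !(x UNLE y), a negated unordered-or-less-equal test, which keeps
   the "no exception on quiet NaN" property; when NaNs cannot occur
   it degrades to the plain !(x <= y).  */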
8057 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8058 arithmetic if it can never overflow, or into internal functions that
8059 return both the result of the arithmetic and an overflow flag in
8060 a complex integer result, or into some other overflow check.
8061 Similarly, fold __builtin_{add,sub,mul}_overflow_p to just the
8062 overflow-checking part of that. */
8064 static tree
8065 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8066 tree arg0, tree arg1, tree arg2)
8068 enum internal_fn ifn = IFN_LAST;
8069 /* The code of the expression corresponding to the type-generic
8070 built-in, or ERROR_MARK for the type-specific ones. */
8071 enum tree_code opcode = ERROR_MARK;
8072 bool ovf_only = false;
8074 switch (fcode)
8076 case BUILT_IN_ADD_OVERFLOW_P:
8077 ovf_only = true;
8078 /* FALLTHRU */
8079 case BUILT_IN_ADD_OVERFLOW:
8080 opcode = PLUS_EXPR;
8081 /* FALLTHRU */
8082 case BUILT_IN_SADD_OVERFLOW:
8083 case BUILT_IN_SADDL_OVERFLOW:
8084 case BUILT_IN_SADDLL_OVERFLOW:
8085 case BUILT_IN_UADD_OVERFLOW:
8086 case BUILT_IN_UADDL_OVERFLOW:
8087 case BUILT_IN_UADDLL_OVERFLOW:
8088 ifn = IFN_ADD_OVERFLOW;
8089 break;
8090 case BUILT_IN_SUB_OVERFLOW_P:
8091 ovf_only = true;
8092 /* FALLTHRU */
8093 case BUILT_IN_SUB_OVERFLOW:
8094 opcode = MINUS_EXPR;
8095 /* FALLTHRU */
8096 case BUILT_IN_SSUB_OVERFLOW:
8097 case BUILT_IN_SSUBL_OVERFLOW:
8098 case BUILT_IN_SSUBLL_OVERFLOW:
8099 case BUILT_IN_USUB_OVERFLOW:
8100 case BUILT_IN_USUBL_OVERFLOW:
8101 case BUILT_IN_USUBLL_OVERFLOW:
8102 ifn = IFN_SUB_OVERFLOW;
8103 break;
8104 case BUILT_IN_MUL_OVERFLOW_P:
8105 ovf_only = true;
8106 /* FALLTHRU */
8107 case BUILT_IN_MUL_OVERFLOW:
8108 opcode = MULT_EXPR;
8109 /* FALLTHRU */
8110 case BUILT_IN_SMUL_OVERFLOW:
8111 case BUILT_IN_SMULL_OVERFLOW:
8112 case BUILT_IN_SMULLL_OVERFLOW:
8113 case BUILT_IN_UMUL_OVERFLOW:
8114 case BUILT_IN_UMULL_OVERFLOW:
8115 case BUILT_IN_UMULLL_OVERFLOW:
8116 ifn = IFN_MUL_OVERFLOW;
8117 break;
8118 default:
8119 gcc_unreachable ();
8122 /* For the "generic" overloads, the first two arguments can have different
8123 types and the last argument determines the target type to use to check
8124 for overflow. The arguments of the other overloads all have the same
8125 type. */
8126 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8128 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8129 arguments are constant, attempt to fold the built-in call into a constant
8130 expression indicating whether or not it detected an overflow. */
8131 if (ovf_only
8132 && TREE_CODE (arg0) == INTEGER_CST
8133 && TREE_CODE (arg1) == INTEGER_CST)
8134 /* Perform the computation in the target type and check for overflow. */
8135 return omit_one_operand_loc (loc, boolean_type_node,
8136 arith_overflowed_p (opcode, type, arg0, arg1)
8137 ? boolean_true_node : boolean_false_node,
8138 arg2);
8140 tree ctype = build_complex_type (type);
8141 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8142 2, arg0, arg1);
8143 tree tgt = save_expr (call);
8144 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8145 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8146 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8148 if (ovf_only)
8149 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8151 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8152 tree store
8153 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8154 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
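/* Illustrative lowering (GIMPLE dump syntax, not from the original
   sources): r = __builtin_add_overflow (a, b, &c) becomes roughly
     tmp = .ADD_OVERFLOW (a, b);   real part: result, imag part: flag
     c = REALPART_EXPR <tmp>;
     r = (_Bool) IMAGPART_EXPR <tmp>;
   while the _overflow_p variants keep only the flag.  */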
8157 /* Fold a call to __builtin_FILE to a constant string. */
8159 static inline tree
8160 fold_builtin_FILE (location_t loc)
8162 if (const char *fname = LOCATION_FILE (loc))
8163 return build_string_literal (strlen (fname) + 1, fname);
8165 return build_string_literal (1, "");
8168 /* Fold a call to __builtin_FUNCTION to a constant string. */
8170 static inline tree
8171 fold_builtin_FUNCTION ()
8173 if (current_function_decl)
8175 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8176 return build_string_literal (strlen (name) + 1, name);
8179 return build_string_literal (1, "");
8182 /* Fold a call to __builtin_LINE to an integer constant. */
8184 static inline tree
8185 fold_builtin_LINE (location_t loc, tree type)
8187 return build_int_cst (type, LOCATION_LINE (loc));
8190 /* Fold a call to built-in function FNDECL with 0 arguments.
8191 This function returns NULL_TREE if no simplification was possible. */
8193 static tree
8194 fold_builtin_0 (location_t loc, tree fndecl)
8196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8198 switch (fcode)
8200 case BUILT_IN_FILE:
8201 return fold_builtin_FILE (loc);
8203 case BUILT_IN_FUNCTION:
8204 return fold_builtin_FUNCTION ();
8206 case BUILT_IN_LINE:
8207 return fold_builtin_LINE (loc, type);
8209 CASE_FLT_FN (BUILT_IN_INF):
8210 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8211 case BUILT_IN_INFD32:
8212 case BUILT_IN_INFD64:
8213 case BUILT_IN_INFD128:
8214 return fold_builtin_inf (loc, type, true);
8216 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8217 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8218 return fold_builtin_inf (loc, type, false);
8220 case BUILT_IN_CLASSIFY_TYPE:
8221 return fold_builtin_classify_type (NULL_TREE);
8223 default:
8224 break;
8226 return NULL_TREE;
8229 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8230 This function returns NULL_TREE if no simplification was possible. */
8232 static tree
8233 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8235 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8236 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8238 if (TREE_CODE (arg0) == ERROR_MARK)
8239 return NULL_TREE;
8241 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8242 return ret;
8244 switch (fcode)
8246 case BUILT_IN_CONSTANT_P:
8248 tree val = fold_builtin_constant_p (arg0);
8250 /* Gimplification will pull the CALL_EXPR for the builtin out of
8251 an if condition. When not optimizing, we'll not CSE it back.
8252 To avoid link-error regressions, return false now. */
8253 if (!val && !optimize)
8254 val = integer_zero_node;
8256 return val;
8259 case BUILT_IN_CLASSIFY_TYPE:
8260 return fold_builtin_classify_type (arg0);
8262 case BUILT_IN_STRLEN:
8263 return fold_builtin_strlen (loc, type, arg0);
8265 CASE_FLT_FN (BUILT_IN_FABS):
8266 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8267 case BUILT_IN_FABSD32:
8268 case BUILT_IN_FABSD64:
8269 case BUILT_IN_FABSD128:
8270 return fold_builtin_fabs (loc, arg0, type);
8272 case BUILT_IN_ABS:
8273 case BUILT_IN_LABS:
8274 case BUILT_IN_LLABS:
8275 case BUILT_IN_IMAXABS:
8276 return fold_builtin_abs (loc, arg0, type);
8278 CASE_FLT_FN (BUILT_IN_CONJ):
8279 if (validate_arg (arg0, COMPLEX_TYPE)
8280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8281 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8282 break;
8284 CASE_FLT_FN (BUILT_IN_CREAL):
8285 if (validate_arg (arg0, COMPLEX_TYPE)
8286 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8287 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8288 break;
8290 CASE_FLT_FN (BUILT_IN_CIMAG):
8291 if (validate_arg (arg0, COMPLEX_TYPE)
8292 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8293 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8294 break;
8296 CASE_FLT_FN (BUILT_IN_CARG):
8297 return fold_builtin_carg (loc, arg0, type);
8299 case BUILT_IN_ISASCII:
8300 return fold_builtin_isascii (loc, arg0);
8302 case BUILT_IN_TOASCII:
8303 return fold_builtin_toascii (loc, arg0);
8305 case BUILT_IN_ISDIGIT:
8306 return fold_builtin_isdigit (loc, arg0);
8308 CASE_FLT_FN (BUILT_IN_FINITE):
8309 case BUILT_IN_FINITED32:
8310 case BUILT_IN_FINITED64:
8311 case BUILT_IN_FINITED128:
8312 case BUILT_IN_ISFINITE:
8314 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8315 if (ret)
8316 return ret;
8317 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8320 CASE_FLT_FN (BUILT_IN_ISINF):
8321 case BUILT_IN_ISINFD32:
8322 case BUILT_IN_ISINFD64:
8323 case BUILT_IN_ISINFD128:
8325 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8326 if (ret)
8327 return ret;
8328 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8331 case BUILT_IN_ISNORMAL:
8332 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8334 case BUILT_IN_ISINF_SIGN:
8335 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8337 CASE_FLT_FN (BUILT_IN_ISNAN):
8338 case BUILT_IN_ISNAND32:
8339 case BUILT_IN_ISNAND64:
8340 case BUILT_IN_ISNAND128:
8341 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8343 case BUILT_IN_FREE:
8344 if (integer_zerop (arg0))
8345 return build_empty_stmt (loc);
8346 break;
8348 default:
8349 break;
8352 return NULL_TREE;
8356 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8357 This function returns NULL_TREE if no simplification was possible. */
8359 static tree
8360 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8362 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8363 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8365 if (TREE_CODE (arg0) == ERROR_MARK
8366 || TREE_CODE (arg1) == ERROR_MARK)
8367 return NULL_TREE;
8369 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8370 return ret;
8372 switch (fcode)
8374 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8375 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8376 if (validate_arg (arg0, REAL_TYPE)
8377 && validate_arg (arg1, POINTER_TYPE))
8378 return do_mpfr_lgamma_r (arg0, arg1, type);
8379 break;
8381 CASE_FLT_FN (BUILT_IN_FREXP):
8382 return fold_builtin_frexp (loc, arg0, arg1, type);
8384 CASE_FLT_FN (BUILT_IN_MODF):
8385 return fold_builtin_modf (loc, arg0, arg1, type);
8387 case BUILT_IN_STRSTR:
8388 return fold_builtin_strstr (loc, arg0, arg1, type);
8390 case BUILT_IN_STRSPN:
8391 return fold_builtin_strspn (loc, arg0, arg1);
8393 case BUILT_IN_STRCSPN:
8394 return fold_builtin_strcspn (loc, arg0, arg1);
8396 case BUILT_IN_STRCHR:
8397 case BUILT_IN_INDEX:
8398 return fold_builtin_strchr (loc, arg0, arg1, type);
8400 case BUILT_IN_STRRCHR:
8401 case BUILT_IN_RINDEX:
8402 return fold_builtin_strrchr (loc, arg0, arg1, type);
8404 case BUILT_IN_STRCMP:
8405 return fold_builtin_strcmp (loc, arg0, arg1);
8407 case BUILT_IN_STRPBRK:
8408 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8410 case BUILT_IN_EXPECT:
8411 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8413 case BUILT_IN_ISGREATER:
8414 return fold_builtin_unordered_cmp (loc, fndecl,
8415 arg0, arg1, UNLE_EXPR, LE_EXPR);
8416 case BUILT_IN_ISGREATEREQUAL:
8417 return fold_builtin_unordered_cmp (loc, fndecl,
8418 arg0, arg1, UNLT_EXPR, LT_EXPR);
8419 case BUILT_IN_ISLESS:
8420 return fold_builtin_unordered_cmp (loc, fndecl,
8421 arg0, arg1, UNGE_EXPR, GE_EXPR);
8422 case BUILT_IN_ISLESSEQUAL:
8423 return fold_builtin_unordered_cmp (loc, fndecl,
8424 arg0, arg1, UNGT_EXPR, GT_EXPR);
8425 case BUILT_IN_ISLESSGREATER:
8426 return fold_builtin_unordered_cmp (loc, fndecl,
8427 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8428 case BUILT_IN_ISUNORDERED:
8429 return fold_builtin_unordered_cmp (loc, fndecl,
8430 arg0, arg1, UNORDERED_EXPR,
8431 NOP_EXPR);
8433 /* We do the folding for va_start in the expander. */
8434 case BUILT_IN_VA_START:
8435 break;
8437 case BUILT_IN_OBJECT_SIZE:
8438 return fold_builtin_object_size (arg0, arg1);
8440 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8441 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8443 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8444 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8446 default:
8447 break;
8449 return NULL_TREE;
8452 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8453 and ARG2.
8454 This function returns NULL_TREE if no simplification was possible. */
8456 static tree
8457 fold_builtin_3 (location_t loc, tree fndecl,
8458 tree arg0, tree arg1, tree arg2)
8460 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8461 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8463 if (TREE_CODE (arg0) == ERROR_MARK
8464 || TREE_CODE (arg1) == ERROR_MARK
8465 || TREE_CODE (arg2) == ERROR_MARK)
8466 return NULL_TREE;
8468 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8469 arg0, arg1, arg2))
8470 return ret;
8472 switch (fcode)
8475 CASE_FLT_FN (BUILT_IN_SINCOS):
8476 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8478 CASE_FLT_FN (BUILT_IN_FMA):
8479 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8481 CASE_FLT_FN (BUILT_IN_REMQUO):
8482 if (validate_arg (arg0, REAL_TYPE)
8483 && validate_arg (arg1, REAL_TYPE)
8484 && validate_arg (arg2, POINTER_TYPE))
8485 return do_mpfr_remquo (arg0, arg1, arg2);
8486 break;
8488 case BUILT_IN_STRNCMP:
8489 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8491 case BUILT_IN_MEMCHR:
8492 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8494 case BUILT_IN_BCMP:
8495 case BUILT_IN_MEMCMP:
8496 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8498 case BUILT_IN_EXPECT:
8499 return fold_builtin_expect (loc, arg0, arg1, arg2);
8501 case BUILT_IN_ADD_OVERFLOW:
8502 case BUILT_IN_SUB_OVERFLOW:
8503 case BUILT_IN_MUL_OVERFLOW:
8504 case BUILT_IN_ADD_OVERFLOW_P:
8505 case BUILT_IN_SUB_OVERFLOW_P:
8506 case BUILT_IN_MUL_OVERFLOW_P:
8507 case BUILT_IN_SADD_OVERFLOW:
8508 case BUILT_IN_SADDL_OVERFLOW:
8509 case BUILT_IN_SADDLL_OVERFLOW:
8510 case BUILT_IN_SSUB_OVERFLOW:
8511 case BUILT_IN_SSUBL_OVERFLOW:
8512 case BUILT_IN_SSUBLL_OVERFLOW:
8513 case BUILT_IN_SMUL_OVERFLOW:
8514 case BUILT_IN_SMULL_OVERFLOW:
8515 case BUILT_IN_SMULLL_OVERFLOW:
8516 case BUILT_IN_UADD_OVERFLOW:
8517 case BUILT_IN_UADDL_OVERFLOW:
8518 case BUILT_IN_UADDLL_OVERFLOW:
8519 case BUILT_IN_USUB_OVERFLOW:
8520 case BUILT_IN_USUBL_OVERFLOW:
8521 case BUILT_IN_USUBLL_OVERFLOW:
8522 case BUILT_IN_UMUL_OVERFLOW:
8523 case BUILT_IN_UMULL_OVERFLOW:
8524 case BUILT_IN_UMULLL_OVERFLOW:
8525 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8527 default:
8528 break;
8530 return NULL_TREE;
8533 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8534 arguments. The trailing bool parameter (formerly IGNORE) is
8535 unused here. This function returns NULL_TREE if no
8536 simplification was possible. */
8538 tree
8539 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8541 tree ret = NULL_TREE;
8543 switch (nargs)
8545 case 0:
8546 ret = fold_builtin_0 (loc, fndecl);
8547 break;
8548 case 1:
8549 ret = fold_builtin_1 (loc, fndecl, args[0]);
8550 break;
8551 case 2:
8552 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8553 break;
8554 case 3:
8555 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8556 break;
8557 default:
8558 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8559 break;
8561 if (ret)
8563 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8564 SET_EXPR_LOCATION (ret, loc);
8565 TREE_NO_WARNING (ret) = 1;
8566 return ret;
8568 return NULL_TREE;
8571 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8572 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8573 of arguments in ARGS to be omitted. OLDNARGS is the number of
8574 elements in ARGS. */
8576 static tree
8577 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8578 int skip, tree fndecl, int n, va_list newargs)
8580 int nargs = oldnargs - skip + n;
8581 tree *buffer;
8583 if (n > 0)
8585 int i, j;
8587 buffer = XALLOCAVEC (tree, nargs);
8588 for (i = 0; i < n; i++)
8589 buffer[i] = va_arg (newargs, tree);
8590 for (j = skip; j < oldnargs; j++, i++)
8591 buffer[i] = args[j];
8593 else
8594 buffer = args + skip;
8596 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8599 /* Return true if FNDECL shouldn't be folded right now.
8600 If a built-in function has an always_inline wrapper, defer
8601 folding it until after always_inline functions have been
8602 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8603 might not be performed. */
8605 bool
8606 avoid_folding_inline_builtin (tree fndecl)
8608 return (DECL_DECLARED_INLINE_P (fndecl)
8609 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8610 && cfun
8611 && !cfun->always_inline_functions_inlined
8612 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8615 /* A wrapper function for builtin folding that prevents warnings for
8616 "statement without effect" and the like, caused by removing the
8617 call node earlier than the warning is generated. */
8619 tree
8620 fold_call_expr (location_t loc, tree exp, bool ignore)
8622 tree ret = NULL_TREE;
8623 tree fndecl = get_callee_fndecl (exp);
8624 if (fndecl
8625 && TREE_CODE (fndecl) == FUNCTION_DECL
8626 && DECL_BUILT_IN (fndecl)
8627 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8628 yet. Defer folding until we see all the arguments
8629 (after inlining). */
8630 && !CALL_EXPR_VA_ARG_PACK (exp))
8632 int nargs = call_expr_nargs (exp);
8634 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8635 instead the last argument is __builtin_va_arg_pack (). Defer folding
8636 even in that case, until arguments are finalized. */
8637 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8639 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8640 if (fndecl2
8641 && TREE_CODE (fndecl2) == FUNCTION_DECL
8642 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8643 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8644 return NULL_TREE;
8647 if (avoid_folding_inline_builtin (fndecl))
8648 return NULL_TREE;
8650 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8651 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8652 CALL_EXPR_ARGP (exp), ignore);
8653 else
8655 tree *args = CALL_EXPR_ARGP (exp);
8656 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8657 if (ret)
8658 return ret;
8661 return NULL_TREE;
8664 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8665 N arguments are passed in the array ARGARRAY. Return a folded
8666 expression or NULL_TREE if no simplification was possible. */
8668 tree
8669 fold_builtin_call_array (location_t loc, tree,
8670 tree fn,
8671 int n,
8672 tree *argarray)
8674 if (TREE_CODE (fn) != ADDR_EXPR)
8675 return NULL_TREE;
8677 tree fndecl = TREE_OPERAND (fn, 0);
8678 if (TREE_CODE (fndecl) == FUNCTION_DECL
8679 && DECL_BUILT_IN (fndecl))
8681 /* If the last argument is __builtin_va_arg_pack (), arguments to this
8682 function are not finalized yet. Defer folding until they are. */
8683 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8685 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8686 if (fndecl2
8687 && TREE_CODE (fndecl2) == FUNCTION_DECL
8688 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8689 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8690 return NULL_TREE;
8692 if (avoid_folding_inline_builtin (fndecl))
8693 return NULL_TREE;
8694 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8695 return targetm.fold_builtin (fndecl, n, argarray, false);
8696 else
8697 return fold_builtin_n (loc, fndecl, argarray, n, false);
8700 return NULL_TREE;
8703 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8704 along with N new arguments specified as the "..." parameters. SKIP
8705 is the number of arguments in EXP to be omitted. This function is used
8706 to do varargs-to-varargs transformations. */
8708 static tree
8709 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8711 va_list ap;
8712 tree t;
8714 va_start (ap, n);
8715 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8716 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8717 va_end (ap);
8719 return t;
8722 /* Validate a single argument ARG against a tree code CODE representing
8723 a type. */
8725 static bool
8726 validate_arg (const_tree arg, enum tree_code code)
8728 if (!arg)
8729 return false;
8730 else if (code == POINTER_TYPE)
8731 return POINTER_TYPE_P (TREE_TYPE (arg));
8732 else if (code == INTEGER_TYPE)
8733 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8734 return code == TREE_CODE (TREE_TYPE (arg));
8737 /* This function validates the types of a function call argument list
8738 against a specified list of tree_codes. If the last specifier is a 0,
8739 that represents an ellipses, otherwise the last specifier must be a
8740 VOID_TYPE.
8742 This is the GIMPLE version of validate_arglist. Eventually we want to
8743 completely convert builtins.c to work from GIMPLEs and the tree based
8744 validate_arglist will then be removed. */
8746 bool
8747 validate_gimple_arglist (const gcall *call, ...)
8749 enum tree_code code;
8750 bool res = false;
8751 va_list ap;
8752 const_tree arg;
8753 size_t i;
8755 va_start (ap, call);
8756 i = 0;
8758 do
8760 code = (enum tree_code) va_arg (ap, int);
8761 switch (code)
8763 case 0:
8764 /* This signifies an ellipsis; any further arguments are all OK. */
8765 res = true;
8766 goto end;
8767 case VOID_TYPE:
8768 /* This signifies an endlink, if no arguments remain, return
8769 true, otherwise return false. */
8770 res = (i == gimple_call_num_args (call));
8771 goto end;
8772 default:
8773 /* If no parameters remain or the parameter's code does not
8774 match the specified code, return false. Otherwise continue
8775 checking any remaining arguments. */
8776 arg = gimple_call_arg (call, i++);
8777 if (!validate_arg (arg, code))
8778 goto end;
8779 break;
8782 while (1);
8784 /* We need gotos here since we can only have one VA_CLOSE in a
8785 function. */
8786 end: ;
8787 va_end (ap);
8789 return res;
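/* Hypothetical illustrative call (added commentary):
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)
   accepts exactly a (floating-point, pointer) argument list, while
   ending the spec with 0 instead of VOID_TYPE would permit extra
   trailing arguments.  */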
8792 /* Default target-specific builtin expander that does nothing. */
8794 rtx
8795 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8796 rtx target ATTRIBUTE_UNUSED,
8797 rtx subtarget ATTRIBUTE_UNUSED,
8798 machine_mode mode ATTRIBUTE_UNUSED,
8799 int ignore ATTRIBUTE_UNUSED)
8801 return NULL_RTX;
8804 /* Returns true if EXP represents data that would potentially reside
8805 in a readonly section. */
8807 bool
8808 readonly_data_expr (tree exp)
8810 STRIP_NOPS (exp);
8812 if (TREE_CODE (exp) != ADDR_EXPR)
8813 return false;
8815 exp = get_base_address (TREE_OPERAND (exp, 0));
8816 if (!exp)
8817 return false;
8819 /* Make sure we call decl_readonly_section only for trees it
8820 can handle (since it returns true for everything it doesn't
8821 understand). */
8822 if (TREE_CODE (exp) == STRING_CST
8823 || TREE_CODE (exp) == CONSTRUCTOR
8824 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8825 return decl_readonly_section (exp, 0);
8826 else
8827 return false;
8830 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8831 to the call, and TYPE is its return type.
8833 Return NULL_TREE if no simplification was possible, otherwise return the
8834 simplified form of the call as a tree.
8836 The simplified form may be a constant or other expression which
8837 computes the same value, but in a more efficient manner (including
8838 calls to other builtin functions).
8840 The call may contain arguments which need to be evaluated, but
8841 which are not useful to determine the result of the call. In
8842 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8843 COMPOUND_EXPR will be an argument which must be evaluated.
8844 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8845 COMPOUND_EXPR in the chain will contain the tree for the simplified
8846 form of the builtin function call. */
8848 static tree
8849 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8851 if (!validate_arg (s1, POINTER_TYPE)
8852 || !validate_arg (s2, POINTER_TYPE))
8853 return NULL_TREE;
8854 else
8856 tree fn;
8857 const char *p1, *p2;
8859 p2 = c_getstr (s2);
8860 if (p2 == NULL)
8861 return NULL_TREE;
8863 p1 = c_getstr (s1);
8864 if (p1 != NULL)
8866 const char *r = strstr (p1, p2);
8867 tree tem;
8869 if (r == NULL)
8870 return build_int_cst (TREE_TYPE (s1), 0);
8872 /* Return an offset into the constant string argument. */
8873 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8874 return fold_convert_loc (loc, type, tem);
8877 /* The argument is const char *, and the result is char *, so we need
8878 a type conversion here to avoid a warning. */
8879 if (p2[0] == '\0')
8880 return fold_convert_loc (loc, type, s1);
8882 if (p2[1] != '\0')
8883 return NULL_TREE;
8885 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8886 if (!fn)
8887 return NULL_TREE;
8889 /* New argument list transforming strstr(s1, s2) to
8890 strchr(s1, s2[0]). */
8891 return build_call_expr_loc (loc, fn, 2, s1,
8892 build_int_cst (integer_type_node, p2[0]));
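/* Illustrative: strstr ("haystack", "st") folds outright to the
   constant "haystack" + 3, and strstr (s, "a") with non-constant S
   becomes strchr (s, 'a').  */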
8896 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8897 the call, and TYPE is its return type.
8899 Return NULL_TREE if no simplification was possible, otherwise return the
8900 simplified form of the call as a tree.
8902 The simplified form may be a constant or other expression which
8903 computes the same value, but in a more efficient manner (including
8904 calls to other builtin functions).
8906 The call may contain arguments which need to be evaluated, but
8907 which are not useful to determine the result of the call. In
8908 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8909 COMPOUND_EXPR will be an argument which must be evaluated.
8910 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8911 COMPOUND_EXPR in the chain will contain the tree for the simplified
8912 form of the builtin function call. */
8914 static tree
8915 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8917 if (!validate_arg (s1, POINTER_TYPE)
8918 || !validate_arg (s2, INTEGER_TYPE))
8919 return NULL_TREE;
8920 else
8922 const char *p1;
8924 if (TREE_CODE (s2) != INTEGER_CST)
8925 return NULL_TREE;
8927 p1 = c_getstr (s1);
8928 if (p1 != NULL)
8930 char c;
8931 const char *r;
8932 tree tem;
8934 if (target_char_cast (s2, &c))
8935 return NULL_TREE;
8937 r = strchr (p1, c);
8939 if (r == NULL)
8940 return build_int_cst (TREE_TYPE (s1), 0);
8942 /* Return an offset into the constant string argument. */
8943 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8944 return fold_convert_loc (loc, type, tem);
8946 return NULL_TREE;
/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
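
/* For example:
     strrchr ("hello", 'l')  ->  "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')  */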
/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
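
/* For example:
     strpbrk (s, "")        ->  a null pointer (S still evaluated for
				side-effects)
     strpbrk (s, "a")       ->  strchr (s, 'a')
     strpbrk ("abc", "xb")  ->  "abc" + 1  */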
/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", the result is 0.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
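
/* For example, strspn (s, "") and strspn ("", s) both fold to
   (size_t) 0, with the other argument still evaluated for its
   side-effects.  */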
/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", the result is 0.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
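
/* For example:
     strcspn ("", s)  ->  (size_t) 0  (S still evaluated)
     strcspn (s, "")  ->  __builtin_strlen (s)  */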
/* Fold the next_arg or va_start call EXP.  Return true if an error was
   produced, false otherwise.  This is done so that we don't output the
   error or warning more than once.  */
bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something other
	     than the last argument even though the user did use the last
	     argument.  We just warn and continue; the generated code will
	     be wrong because of it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behavior when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }

  return false;
}
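
/* For example, in

     void foo (int i, int j, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   the check above issues the -Wvarargs warning because I is not the
   last named parameter; va_start (ap, j) would be correct.  */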
/* Expand a call EXP to __builtin_object_size.  */
static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
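
/* For example, once expansion is reached the size is unknown, so
     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0  */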
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */
static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
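
/* For example, __memcpy_chk (d, s, 16, os) with a known OS >= 16 (or
   with OS == (size_t) -1, i.e. unknown) expands as a plain
   memcpy (d, s, 16), while a known OS < 16 triggers the overflow
   warning above and falls back to the library call.  */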
/* Emit warning if a buffer overflow is detected at compile time.  */
static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
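
/* For example:
     char buf[4];
     __strcpy_chk (buf, "abcde", __builtin_object_size (buf, 0));
   is diagnosed here, since strlen ("abcde") is not smaller than the
   destination size of 4.  */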
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */
static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
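
/* For example:
     char buf[4];
     __sprintf_chk (buf, 0, 4, "abcd");
   is diagnosed here: the format contains no % directives, so the
   output needs strlen ("abcd") + 1 = 5 bytes but only 4 are
   available.  */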
/* Emit warning if a free is called with address of a variable.  */
static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */
static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}
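
/* For example, given
     char buf[16];
   the call __builtin_object_size (&buf[4], 0) folds to 12, the number
   of bytes from the pointer to the end of the object, provided
   compute_builtin_object_size can determine it.  */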
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, EXP is the CALL_EXPR for the call.  */
static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
/* Initialize format string characters in the target charset.  */
bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */
static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */
static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   *ARG_QUO and return the result.  The type is taken from the type of
   ARG0 and is used for setting the precision of the calculation and
   results.  */
static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
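
/* For example, remquo (5.0, 3.0, &q) folds to a COMPOUND_EXPR that
   first stores the quotient 2 in *q and then yields the remainder
   -1.0, since 5.0 == 2 * 3.0 - 1.0 with the quotient rounded to the
   nearest integer.  */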
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */
static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
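
/* For example, lgamma_r (0.5, &sg) folds to a COMPOUND_EXPR that
   stores 1 in *sg (gamma (0.5) == sqrt (pi) is positive) and yields
   log (sqrt (pi)), approximately 0.5723649.  */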
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
			 "ffs");
    }
}
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
      /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;