Implement C _FloatN, _FloatNx types.
[official-gcc.git] / gcc / builtins.c
blob 03a0dc84d5389a0092d4136dae507a20419f7c21
/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or names one of the Cilk Plus runtime entry points checked below.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
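
/* As a quick illustration (these names are examples, not an exhaustive
   list): given the checks above,

     is_builtin_name ("__builtin_memcpy")      -> true
     is_builtin_name ("__sync_fetch_and_add")  -> true
     is_builtin_name ("__atomic_load_n")       -> true
     is_builtin_name ("memcpy")                -> false

   i.e. only the reserved prefixes are recognized.  */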

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
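
/* Worked example of the M/N contract above (illustrative only): for

     char buf[16] __attribute__ ((aligned (8)));

   the reference buf[3] yields M = 8 * BITS_PER_UNIT = 64 and
   N = 3 * BITS_PER_UNIT = 24, i.e. the address of buf[3] is known to be
   exactly 3 bytes past a 64-bit-aligned address, and N < M holds.  */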

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a pointer, BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
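
/* For instance (an illustrative sketch, not from the original sources):
   given the GENERIC tree for "hello" + 2, string_constant returns the
   STRING_CST "hello" with OFFSET_NODE 2, and the final strlen search
   yields ssize_int (3).  A non-constant offset into "foo\0bar" instead
   hits the early NULL_TREE return above, because of the embedded zero
   byte.  */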

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
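
/* Byte-order example (illustrative): on a little-endian target with
   32-bit SImode, c_readstr ("abcd", SImode) packs 'a' into the least
   significant byte, giving the constant 0x64636261; a big-endian
   target gives 0x61626364 for the same string.  */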

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
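
/* E.g. (an illustrative cross-compile scenario): with a 16-bit target
   char and an 8-bit host char, the constant 0x41 passes both masks and
   0 is returned with *P = 0x41, while 0x141 survives the CHAR_TYPE_SIZE
   mask but not the HOST_BITS_PER_CHAR one, so VAL != HOSTVAL and 1 is
   returned.  */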

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
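
/* User-level view (for illustration): this is the expander behind

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   where a nonzero COUNT forces the hard frame pointer to be used, as
   described in the comment above.  */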

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
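
/* Resulting buffer layout (a sketch of the stores above): word 0 holds
   the frame pointer value, word 1 the address of RECEIVER_LABEL, and
   words 2 onward the machine-dependent stack save area.  */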

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
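
/* Source-level sketch: __builtin_setjmp (buf) returns 0 on the direct
   path and 1 after __builtin_longjmp (buf, 1); the second argument must
   be the literal 1, matching the gcc_assert on VALUE above.  */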

/* Return true if the CALL_EXPR argument iterator ITER still has
   arguments to produce.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
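
/* Typical uses (both appear in callers later in this file):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
       -- exactly two pointer arguments;
     validate_arglist (exp, POINTER_TYPE, 0)
       -- a pointer followed by anything, as for __builtin_prefetch.  */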

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
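
/* For example, user code such as

     __builtin_prefetch (p + 64, 0, 3);

   (read intent, maximum locality) reaches this expander; both constant
   arguments may be omitted and default to 0 and 3 as noted above.  */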

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1662 /* Perform an untyped return. */
1664 static void
1665 expand_builtin_return (rtx result)
1667 int size, align, regno;
1668 machine_mode mode;
1669 rtx reg;
1670 rtx_insn *call_fusage = 0;
1672 result = convert_memory_address (Pmode, result);
1674 apply_result_size ();
1675 result = gen_rtx_MEM (BLKmode, result);
1677 if (targetm.have_untyped_return ())
1679 rtx vector = result_vector (0, result);
1680 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1681 emit_barrier ();
1682 return;
1685 /* Restore the return value and note that each value is used. */
1686 size = 0;
1687 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1688 if ((mode = apply_result_mode[regno]) != VOIDmode)
1690 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1691 if (size % align != 0)
1692 size = CEIL (size, align) * align;
1693 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1694 emit_move_insn (reg, adjust_address (result, mode, size));
1696 push_to_sequence (call_fusage);
1697 emit_use (reg);
1698 call_fusage = get_insns ();
1699 end_sequence ();
1700 size += GET_MODE_SIZE (mode);
1703 /* Put the USE insns before the return. */
1704 emit_insn (call_fusage);
1706 /* Return whatever value was restored by jumping directly to the end
1707 of the function. */
1708 expand_naked_return ();
1711 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1713 static enum type_class
1714 type_to_class (tree type)
1716 switch (TREE_CODE (type))
1718 case VOID_TYPE: return void_type_class;
1719 case INTEGER_TYPE: return integer_type_class;
1720 case ENUMERAL_TYPE: return enumeral_type_class;
1721 case BOOLEAN_TYPE: return boolean_type_class;
1722 case POINTER_TYPE: return pointer_type_class;
1723 case REFERENCE_TYPE: return reference_type_class;
1724 case OFFSET_TYPE: return offset_type_class;
1725 case REAL_TYPE: return real_type_class;
1726 case COMPLEX_TYPE: return complex_type_class;
1727 case FUNCTION_TYPE: return function_type_class;
1728 case METHOD_TYPE: return method_type_class;
1729 case RECORD_TYPE: return record_type_class;
1730 case UNION_TYPE:
1731 case QUAL_UNION_TYPE: return union_type_class;
1732 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1733 ? string_type_class : array_type_class);
1734 case LANG_TYPE: return lang_type_class;
1735 default: return no_type_class;
1739 /* Expand a call EXP to __builtin_classify_type. */
1741 static rtx
1742 expand_builtin_classify_type (tree exp)
1744 if (call_expr_nargs (exp))
1745 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1746 return GEN_INT (no_type_class);
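/* Illustrative sketch (hypothetical, not part of the build): at the
   user level, __builtin_classify_type folds to a constant from the
   enum in typeclass.h via the mapping above.  The numeric values in
   the comments assume that enum's layout.  */
#if 0
int
classify_type_demo (void)
{
  int i = __builtin_classify_type (0);          /* integer_type_class (1).  */
  int r = __builtin_classify_type (0.0);        /* real_type_class (8).  */
  int p = __builtin_classify_type ((void *) 0); /* pointer_type_class (5).  */
  return i + r + p;
}
#endif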
1749 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1750 determines which among a set of three builtin math functions is
1751 appropriate for a given type mode. The `F' and `L' cases are
1752 automatically generated from the `double' case. */
1753 #define CASE_MATHFN(MATHFN) \
1754 CASE_CFN_##MATHFN: \
1755 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1756 fcodel = BUILT_IN_##MATHFN##L ; break;
1757 /* Similar to above, but appends _R after any F/L suffix. */
1758 #define CASE_MATHFN_REENT(MATHFN) \
1759 case CFN_BUILT_IN_##MATHFN##_R: \
1760 case CFN_BUILT_IN_##MATHFN##F_R: \
1761 case CFN_BUILT_IN_##MATHFN##L_R: \
1762 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1763 fcodel = BUILT_IN_##MATHFN##L_R ; break;
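/* For reference, a single use such as CASE_MATHFN (SQRT) expands to:

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   so one macro use covers the double, float and long double variants.  */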
1765 /* Return a function equivalent to FN but operating on floating-point
1766 values of type TYPE, or END_BUILTINS if no such function exists.
1767 This is purely an operation on function codes; it does not guarantee
1768 that the target actually has an implementation of the function. */
1770 static built_in_function
1771 mathfn_built_in_2 (tree type, combined_fn fn)
1773 built_in_function fcode, fcodef, fcodel;
1775 switch (fn)
1777 CASE_MATHFN (ACOS)
1778 CASE_MATHFN (ACOSH)
1779 CASE_MATHFN (ASIN)
1780 CASE_MATHFN (ASINH)
1781 CASE_MATHFN (ATAN)
1782 CASE_MATHFN (ATAN2)
1783 CASE_MATHFN (ATANH)
1784 CASE_MATHFN (CBRT)
1785 CASE_MATHFN (CEIL)
1786 CASE_MATHFN (CEXPI)
1787 CASE_MATHFN (COPYSIGN)
1788 CASE_MATHFN (COS)
1789 CASE_MATHFN (COSH)
1790 CASE_MATHFN (DREM)
1791 CASE_MATHFN (ERF)
1792 CASE_MATHFN (ERFC)
1793 CASE_MATHFN (EXP)
1794 CASE_MATHFN (EXP10)
1795 CASE_MATHFN (EXP2)
1796 CASE_MATHFN (EXPM1)
1797 CASE_MATHFN (FABS)
1798 CASE_MATHFN (FDIM)
1799 CASE_MATHFN (FLOOR)
1800 CASE_MATHFN (FMA)
1801 CASE_MATHFN (FMAX)
1802 CASE_MATHFN (FMIN)
1803 CASE_MATHFN (FMOD)
1804 CASE_MATHFN (FREXP)
1805 CASE_MATHFN (GAMMA)
1806 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1807 CASE_MATHFN (HUGE_VAL)
1808 CASE_MATHFN (HYPOT)
1809 CASE_MATHFN (ILOGB)
1810 CASE_MATHFN (ICEIL)
1811 CASE_MATHFN (IFLOOR)
1812 CASE_MATHFN (INF)
1813 CASE_MATHFN (IRINT)
1814 CASE_MATHFN (IROUND)
1815 CASE_MATHFN (ISINF)
1816 CASE_MATHFN (J0)
1817 CASE_MATHFN (J1)
1818 CASE_MATHFN (JN)
1819 CASE_MATHFN (LCEIL)
1820 CASE_MATHFN (LDEXP)
1821 CASE_MATHFN (LFLOOR)
1822 CASE_MATHFN (LGAMMA)
1823 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1824 CASE_MATHFN (LLCEIL)
1825 CASE_MATHFN (LLFLOOR)
1826 CASE_MATHFN (LLRINT)
1827 CASE_MATHFN (LLROUND)
1828 CASE_MATHFN (LOG)
1829 CASE_MATHFN (LOG10)
1830 CASE_MATHFN (LOG1P)
1831 CASE_MATHFN (LOG2)
1832 CASE_MATHFN (LOGB)
1833 CASE_MATHFN (LRINT)
1834 CASE_MATHFN (LROUND)
1835 CASE_MATHFN (MODF)
1836 CASE_MATHFN (NAN)
1837 CASE_MATHFN (NANS)
1838 CASE_MATHFN (NEARBYINT)
1839 CASE_MATHFN (NEXTAFTER)
1840 CASE_MATHFN (NEXTTOWARD)
1841 CASE_MATHFN (POW)
1842 CASE_MATHFN (POWI)
1843 CASE_MATHFN (POW10)
1844 CASE_MATHFN (REMAINDER)
1845 CASE_MATHFN (REMQUO)
1846 CASE_MATHFN (RINT)
1847 CASE_MATHFN (ROUND)
1848 CASE_MATHFN (SCALB)
1849 CASE_MATHFN (SCALBLN)
1850 CASE_MATHFN (SCALBN)
1851 CASE_MATHFN (SIGNBIT)
1852 CASE_MATHFN (SIGNIFICAND)
1853 CASE_MATHFN (SIN)
1854 CASE_MATHFN (SINCOS)
1855 CASE_MATHFN (SINH)
1856 CASE_MATHFN (SQRT)
1857 CASE_MATHFN (TAN)
1858 CASE_MATHFN (TANH)
1859 CASE_MATHFN (TGAMMA)
1860 CASE_MATHFN (TRUNC)
1861 CASE_MATHFN (Y0)
1862 CASE_MATHFN (Y1)
1863 CASE_MATHFN (YN)
1865 default:
1866 return END_BUILTINS;
1869 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1870 return fcode;
1871 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1872 return fcodef;
1873 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1874 return fcodel;
1875 else
1876 return END_BUILTINS;
1879 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1880 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1881 otherwise use the explicit declaration. If we can't do the conversion,
1882 return null. */
1884 static tree
1885 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1887 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1888 if (fcode2 == END_BUILTINS)
1889 return NULL_TREE;
1891 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1892 return NULL_TREE;
1894 return builtin_decl_explicit (fcode2);
1897 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
1899 tree
1900 mathfn_built_in (tree type, combined_fn fn)
1902 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1905 /* Like mathfn_built_in_1, but take a built_in_function and
1906 always use the implicit builtin declarations. */
1908 tree
1909 mathfn_built_in (tree type, enum built_in_function fn)
1911 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
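/* Usage sketch (hypothetical caller, not part of the build): ask for
   the float variant of a double builtin.  Assuming the sqrtf builtin
   decl is registered as implicitly available, this yields it;
   otherwise NULL_TREE.  */
#if 0
static tree
sqrtf_decl_sketch (void)
{
  /* For float_type_node, BUILT_IN_SQRT is remapped to BUILT_IN_SQRTF
     by mathfn_built_in_2 above.  */
  return mathfn_built_in (float_type_node, BUILT_IN_SQRT);
}
#endif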
1914 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1915 return its code, otherwise return IFN_LAST. Note that this function
1916 only tests whether the function is defined in internal-fn.def, not whether
1917 it is actually available on the target. */
1919 internal_fn
1920 associated_internal_fn (tree fndecl)
1922 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1923 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1924 switch (DECL_FUNCTION_CODE (fndecl))
1926 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1927 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1928 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1929 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1930 #include "internal-fn.def"
1932 CASE_FLT_FN (BUILT_IN_POW10):
1933 return IFN_EXP10;
1935 CASE_FLT_FN (BUILT_IN_DREM):
1936 return IFN_REMAINDER;
1938 CASE_FLT_FN (BUILT_IN_SCALBN):
1939 CASE_FLT_FN (BUILT_IN_SCALBLN):
1940 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1941 return IFN_LDEXP;
1942 return IFN_LAST;
1944 default:
1945 return IFN_LAST;
1949 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1950 on the current target by a call to an internal function, return the
1951 code of that internal function, otherwise return IFN_LAST. The caller
1952 is responsible for ensuring that any side-effects of the built-in
1953 call are dealt with correctly. E.g. if CALL sets errno, the caller
1954 must decide that the errno result isn't needed or make it available
1955 in some other way. */
1957 internal_fn
1958 replacement_internal_fn (gcall *call)
1960 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1962 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1963 if (ifn != IFN_LAST)
1965 tree_pair types = direct_internal_fn_types (ifn, call);
1966 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1967 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1968 return ifn;
1971 return IFN_LAST;
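/* Usage sketch (hypothetical pass code, not part of the build): a
   caller that rewrites a builtin call into the matching internal
   function when the target supports it.  The actual rewriting and the
   errno bookkeeping described above are elided.  */
#if 0
static void
maybe_replace_with_ifn_sketch (gcall *call)
{
  internal_fn ifn = replacement_internal_fn (call);
  if (ifn != IFN_LAST)
    {
      /* ... rewrite CALL to use IFN, having first dealt with any
         errno side-effect as the comment above requires ...  */
    }
}
#endif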
1974 /* Expand a call to the builtin trinary math functions (fma).
1975 Return NULL_RTX if a normal call should be emitted rather than expanding the
1976 function in-line. EXP is the expression that is a call to the builtin
1977 function; if convenient, the result should be placed in TARGET.
1978 SUBTARGET may be used as the target for computing one of EXP's
1979 operands. */
1981 static rtx
1982 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1984 optab builtin_optab;
1985 rtx op0, op1, op2, result;
1986 rtx_insn *insns;
1987 tree fndecl = get_callee_fndecl (exp);
1988 tree arg0, arg1, arg2;
1989 machine_mode mode;
1991 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1992 return NULL_RTX;
1994 arg0 = CALL_EXPR_ARG (exp, 0);
1995 arg1 = CALL_EXPR_ARG (exp, 1);
1996 arg2 = CALL_EXPR_ARG (exp, 2);
1998 switch (DECL_FUNCTION_CODE (fndecl))
2000 CASE_FLT_FN (BUILT_IN_FMA):
2001 builtin_optab = fma_optab; break;
2002 default:
2003 gcc_unreachable ();
2006 /* Make a suitable register to place result in. */
2007 mode = TYPE_MODE (TREE_TYPE (exp));
2009 /* Before working hard, check whether the instruction is available. */
2010 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2011 return NULL_RTX;
2013 result = gen_reg_rtx (mode);
2015 /* Always stabilize the argument list. */
2016 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2017 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2018 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2020 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2021 op1 = expand_normal (arg1);
2022 op2 = expand_normal (arg2);
2024 start_sequence ();
2026 /* Compute into RESULT.
2027 Set RESULT to wherever the result comes back. */
2028 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2029 result, 0);
2031 /* If we were unable to expand via the builtin, stop the sequence
2032 (without outputting the insns) and call the library function
2033 with the stabilized argument list. */
2034 if (result == 0)
2036 end_sequence ();
2037 return expand_call (exp, target, target == const0_rtx);
2040 /* Output the entire sequence. */
2041 insns = get_insns ();
2042 end_sequence ();
2043 emit_insn (insns);
2045 return result;
2048 /* Expand a call to the builtin sin and cos math functions.
2049 Return NULL_RTX if a normal call should be emitted rather than expanding the
2050 function in-line. EXP is the expression that is a call to the builtin
2051 function; if convenient, the result should be placed in TARGET.
2052 SUBTARGET may be used as the target for computing one of EXP's
2053 operands. */
2055 static rtx
2056 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2058 optab builtin_optab;
2059 rtx op0;
2060 rtx_insn *insns;
2061 tree fndecl = get_callee_fndecl (exp);
2062 machine_mode mode;
2063 tree arg;
2065 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2066 return NULL_RTX;
2068 arg = CALL_EXPR_ARG (exp, 0);
2070 switch (DECL_FUNCTION_CODE (fndecl))
2072 CASE_FLT_FN (BUILT_IN_SIN):
2073 CASE_FLT_FN (BUILT_IN_COS):
2074 builtin_optab = sincos_optab; break;
2075 default:
2076 gcc_unreachable ();
2079 /* Make a suitable register to place result in. */
2080 mode = TYPE_MODE (TREE_TYPE (exp));
2082 /* Check if the sincos insn is available; if not, fall back
2083 to the sin or cos insn. */
2084 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2085 switch (DECL_FUNCTION_CODE (fndecl))
2087 CASE_FLT_FN (BUILT_IN_SIN):
2088 builtin_optab = sin_optab; break;
2089 CASE_FLT_FN (BUILT_IN_COS):
2090 builtin_optab = cos_optab; break;
2091 default:
2092 gcc_unreachable ();
2095 /* Before working hard, check whether the instruction is available. */
2096 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2098 rtx result = gen_reg_rtx (mode);
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102 side-effects more than once. */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2107 start_sequence ();
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 if (builtin_optab == sincos_optab)
2113 int ok;
2115 switch (DECL_FUNCTION_CODE (fndecl))
2117 CASE_FLT_FN (BUILT_IN_SIN):
2118 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2119 break;
2120 CASE_FLT_FN (BUILT_IN_COS):
2121 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2122 break;
2123 default:
2124 gcc_unreachable ();
2126 gcc_assert (ok);
2128 else
2129 result = expand_unop (mode, builtin_optab, op0, result, 0);
2131 if (result != 0)
2133 /* Output the entire sequence. */
2134 insns = get_insns ();
2135 end_sequence ();
2136 emit_insn (insns);
2137 return result;
2140 /* If we were unable to expand via the builtin, stop the sequence
2141 (without outputting the insns) and call the library function
2142 with the stabilized argument list. */
2143 end_sequence ();
2146 return expand_call (exp, target, target == const0_rtx);
2149 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2150 return an RTL instruction code that implements the functionality.
2151 If that isn't possible or available, return CODE_FOR_nothing. */
2153 static enum insn_code
2154 interclass_mathfn_icode (tree arg, tree fndecl)
2156 bool errno_set = false;
2157 optab builtin_optab = unknown_optab;
2158 machine_mode mode;
2160 switch (DECL_FUNCTION_CODE (fndecl))
2162 CASE_FLT_FN (BUILT_IN_ILOGB):
2163 errno_set = true; builtin_optab = ilogb_optab; break;
2164 CASE_FLT_FN (BUILT_IN_ISINF):
2165 builtin_optab = isinf_optab; break;
2166 case BUILT_IN_ISNORMAL:
2167 case BUILT_IN_ISFINITE:
2168 CASE_FLT_FN (BUILT_IN_FINITE):
2169 case BUILT_IN_FINITED32:
2170 case BUILT_IN_FINITED64:
2171 case BUILT_IN_FINITED128:
2172 case BUILT_IN_ISINFD32:
2173 case BUILT_IN_ISINFD64:
2174 case BUILT_IN_ISINFD128:
2175 /* These builtins have no optabs (yet). */
2176 break;
2177 default:
2178 gcc_unreachable ();
2181 /* There's no easy way to detect the case we need to set EDOM. */
2182 if (flag_errno_math && errno_set)
2183 return CODE_FOR_nothing;
2185 /* Optab mode depends on the mode of the input argument. */
2186 mode = TYPE_MODE (TREE_TYPE (arg));
2188 if (builtin_optab)
2189 return optab_handler (builtin_optab, mode);
2190 return CODE_FOR_nothing;
2193 /* Expand a call to one of the builtin math functions that operate on
2194 a floating-point argument and produce an integer result (ilogb, isinf,
2195 isnan, etc.).
2196 Return 0 if a normal call should be emitted rather than expanding the
2197 function in-line. EXP is the expression that is a call to the builtin
2198 function; if convenient, the result should be placed in TARGET. */
2200 static rtx
2201 expand_builtin_interclass_mathfn (tree exp, rtx target)
2203 enum insn_code icode = CODE_FOR_nothing;
2204 rtx op0;
2205 tree fndecl = get_callee_fndecl (exp);
2206 machine_mode mode;
2207 tree arg;
2209 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2210 return NULL_RTX;
2212 arg = CALL_EXPR_ARG (exp, 0);
2213 icode = interclass_mathfn_icode (arg, fndecl);
2214 mode = TYPE_MODE (TREE_TYPE (arg));
2216 if (icode != CODE_FOR_nothing)
2218 struct expand_operand ops[1];
2219 rtx_insn *last = get_last_insn ();
2220 tree orig_arg = arg;
2222 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2223 need to expand the argument again. This way, we will not perform
2224 side-effects more than once. */
2225 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2227 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2229 if (mode != GET_MODE (op0))
2230 op0 = convert_to_mode (mode, op0, 0);
2232 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2233 if (maybe_legitimize_operands (icode, 0, 1, ops)
2234 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2235 return ops[0].value;
2237 delete_insns_since (last);
2238 CALL_EXPR_ARG (exp, 0) = orig_arg;
2241 return NULL_RTX;
2244 /* Expand a call to the builtin sincos math function.
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function. */
2249 static rtx
2250 expand_builtin_sincos (tree exp)
2252 rtx op0, op1, op2, target1, target2;
2253 machine_mode mode;
2254 tree arg, sinp, cosp;
2255 int result;
2256 location_t loc = EXPR_LOCATION (exp);
2257 tree alias_type, alias_off;
2259 if (!validate_arglist (exp, REAL_TYPE,
2260 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2261 return NULL_RTX;
2263 arg = CALL_EXPR_ARG (exp, 0);
2264 sinp = CALL_EXPR_ARG (exp, 1);
2265 cosp = CALL_EXPR_ARG (exp, 2);
2267 /* Make a suitable register to place result in. */
2268 mode = TYPE_MODE (TREE_TYPE (arg));
2270 /* Check if the sincos insn is available; if not, emit the call. */
2271 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2272 return NULL_RTX;
2274 target1 = gen_reg_rtx (mode);
2275 target2 = gen_reg_rtx (mode);
2277 op0 = expand_normal (arg);
2278 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2279 alias_off = build_int_cst (alias_type, 0);
2280 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2281 sinp, alias_off));
2282 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2283 cosp, alias_off));
2285 /* Compute into target1 and target2.
2286 Set TARGET to wherever the result comes back. */
2287 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2288 gcc_assert (result);
2290 /* Move target1 and target2 to the memory locations indicated
2291 by op1 and op2. */
2292 emit_move_insn (op1, target1);
2293 emit_move_insn (op2, target2);
2295 return const0_rtx;
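/* Source-level sketch: when the sincos optab exists for the argument's
   mode, a call such as

     double s, c;
     sincos (x, &s, &c);

   is expanded into a single two-value insn plus two stores, and the
   const0_rtx return above reflects sincos's void result.  */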
2298 /* Expand a call to the internal cexpi builtin to the sincos math function.
2299 EXP is the expression that is a call to the builtin function; if convenient,
2300 the result should be placed in TARGET. */
2302 static rtx
2303 expand_builtin_cexpi (tree exp, rtx target)
2305 tree fndecl = get_callee_fndecl (exp);
2306 tree arg, type;
2307 machine_mode mode;
2308 rtx op0, op1, op2;
2309 location_t loc = EXPR_LOCATION (exp);
2311 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2312 return NULL_RTX;
2314 arg = CALL_EXPR_ARG (exp, 0);
2315 type = TREE_TYPE (arg);
2316 mode = TYPE_MODE (TREE_TYPE (arg));
2318 /* Try expanding via a sincos optab; fall back to emitting a libcall
2319 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2320 is only generated from sincos or cexp, or when either of them is available. */
2321 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2323 op1 = gen_reg_rtx (mode);
2324 op2 = gen_reg_rtx (mode);
2326 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2328 /* Compute into op1 and op2. */
2329 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2331 else if (targetm.libc_has_function (function_sincos))
2333 tree call, fn = NULL_TREE;
2334 tree top1, top2;
2335 rtx op1a, op2a;
2337 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2338 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2339 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2340 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2341 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2342 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2343 else
2344 gcc_unreachable ();
2346 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2347 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2348 op1a = copy_addr_to_reg (XEXP (op1, 0));
2349 op2a = copy_addr_to_reg (XEXP (op2, 0));
2350 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2351 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2353 /* Make sure not to fold the sincos call again. */
2354 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2355 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2356 call, 3, arg, top1, top2));
2358 else
2360 tree call, fn = NULL_TREE, narg;
2361 tree ctype = build_complex_type (type);
2363 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2364 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2366 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2367 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2368 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2369 else
2370 gcc_unreachable ();
2372 /* If we don't have a decl for cexp, create one. This is the
2373 friendliest fallback if the user calls __builtin_cexpi on a target
2374 without full C99 function support. */
2375 if (fn == NULL_TREE)
2377 tree fntype;
2378 const char *name = NULL;
2380 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2381 name = "cexpf";
2382 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2383 name = "cexp";
2384 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2385 name = "cexpl";
2387 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2388 fn = build_fn_decl (name, fntype);
2391 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2392 build_real (type, dconst0), arg);
2394 /* Make sure not to fold the cexp call again. */
2395 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2396 return expand_expr (build_call_nary (ctype, call, 1, narg),
2397 target, VOIDmode, EXPAND_NORMAL);
2400 /* Now build the proper return type. */
2401 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2402 make_tree (TREE_TYPE (arg), op2),
2403 make_tree (TREE_TYPE (arg), op1)),
2404 target, VOIDmode, EXPAND_NORMAL);
2407 /* Conveniently construct a function call expression. FNDECL names the
2408 function to be called, N is the number of arguments, and the "..."
2409 parameters are the argument expressions. Unlike build_call_expr
2410 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2412 static tree
2413 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2415 va_list ap;
2416 tree fntype = TREE_TYPE (fndecl);
2417 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2419 va_start (ap, n);
2420 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2421 va_end (ap);
2422 SET_EXPR_LOCATION (fn, loc);
2423 return fn;
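/* Usage sketch, taken from the expanders below: this is how a builtin
   is re-expressed as a plain library call, e.g. rewriting mempcpy as
   memcpy when the result is unused (see expand_builtin_mempcpy_args):

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                        dest, src, len);  */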
2426 /* Expand a call to one of the builtin rounding functions GCC defines
2427 as an extension (lfloor and lceil). As these are GCC extensions we
2428 do not need to worry about setting errno to EDOM.
2429 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2430 EXP is the expression that is a call to the builtin function;
2431 if convenient, the result should be placed in TARGET. */
2433 static rtx
2434 expand_builtin_int_roundingfn (tree exp, rtx target)
2436 convert_optab builtin_optab;
2437 rtx op0, tmp;
2438 rtx_insn *insns;
2439 tree fndecl = get_callee_fndecl (exp);
2440 enum built_in_function fallback_fn;
2441 tree fallback_fndecl;
2442 machine_mode mode;
2443 tree arg;
2445 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2446 gcc_unreachable ();
2448 arg = CALL_EXPR_ARG (exp, 0);
2450 switch (DECL_FUNCTION_CODE (fndecl))
2452 CASE_FLT_FN (BUILT_IN_ICEIL):
2453 CASE_FLT_FN (BUILT_IN_LCEIL):
2454 CASE_FLT_FN (BUILT_IN_LLCEIL):
2455 builtin_optab = lceil_optab;
2456 fallback_fn = BUILT_IN_CEIL;
2457 break;
2459 CASE_FLT_FN (BUILT_IN_IFLOOR):
2460 CASE_FLT_FN (BUILT_IN_LFLOOR):
2461 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2462 builtin_optab = lfloor_optab;
2463 fallback_fn = BUILT_IN_FLOOR;
2464 break;
2466 default:
2467 gcc_unreachable ();
2470 /* Make a suitable register to place result in. */
2471 mode = TYPE_MODE (TREE_TYPE (exp));
2473 target = gen_reg_rtx (mode);
2475 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2476 need to expand the argument again. This way, we will not perform
2477 side-effects more than once. */
2478 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2480 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2482 start_sequence ();
2484 /* Compute into TARGET. */
2485 if (expand_sfix_optab (target, op0, builtin_optab))
2487 /* Output the entire sequence. */
2488 insns = get_insns ();
2489 end_sequence ();
2490 emit_insn (insns);
2491 return target;
2494 /* If we were unable to expand via the builtin, stop the sequence
2495 (without outputting the insns). */
2496 end_sequence ();
2498 /* Fall back to floating point rounding optab. */
2499 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2501 /* For non-C99 targets we may end up without a fallback fndecl here
2502 if the user called __builtin_lfloor directly. In this case emit
2503 a call to the floor/ceil variants nevertheless. This should result
2504 in the best user experience on targets without full C99 support. */
2505 if (fallback_fndecl == NULL_TREE)
2507 tree fntype;
2508 const char *name = NULL;
2510 switch (DECL_FUNCTION_CODE (fndecl))
2512 case BUILT_IN_ICEIL:
2513 case BUILT_IN_LCEIL:
2514 case BUILT_IN_LLCEIL:
2515 name = "ceil";
2516 break;
2517 case BUILT_IN_ICEILF:
2518 case BUILT_IN_LCEILF:
2519 case BUILT_IN_LLCEILF:
2520 name = "ceilf";
2521 break;
2522 case BUILT_IN_ICEILL:
2523 case BUILT_IN_LCEILL:
2524 case BUILT_IN_LLCEILL:
2525 name = "ceill";
2526 break;
2527 case BUILT_IN_IFLOOR:
2528 case BUILT_IN_LFLOOR:
2529 case BUILT_IN_LLFLOOR:
2530 name = "floor";
2531 break;
2532 case BUILT_IN_IFLOORF:
2533 case BUILT_IN_LFLOORF:
2534 case BUILT_IN_LLFLOORF:
2535 name = "floorf";
2536 break;
2537 case BUILT_IN_IFLOORL:
2538 case BUILT_IN_LFLOORL:
2539 case BUILT_IN_LLFLOORL:
2540 name = "floorl";
2541 break;
2542 default:
2543 gcc_unreachable ();
2546 fntype = build_function_type_list (TREE_TYPE (arg),
2547 TREE_TYPE (arg), NULL_TREE);
2548 fallback_fndecl = build_fn_decl (name, fntype);
2551 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2553 tmp = expand_normal (exp);
2554 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2556 /* Truncate the result of floating point optab to integer
2557 via expand_fix (). */
2558 target = gen_reg_rtx (mode);
2559 expand_fix (target, tmp, 0);
2561 return target;
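/* Source-level sketch of the two paths above: given

     long l = __builtin_lfloor (x);

   the fast path emits the lfloor insn directly; the fallback expands
   the equivalent of (long) floor (x), i.e. a call to floor (built via
   build_call_nofold_loc) followed by expand_fix on its result.  */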
2564 /* Expand a call to one of the builtin math functions doing integer
2565 conversion (lrint).
2566 Return 0 if a normal call should be emitted rather than expanding the
2567 function in-line. EXP is the expression that is a call to the builtin
2568 function; if convenient, the result should be placed in TARGET. */
2570 static rtx
2571 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2573 convert_optab builtin_optab;
2574 rtx op0;
2575 rtx_insn *insns;
2576 tree fndecl = get_callee_fndecl (exp);
2577 tree arg;
2578 machine_mode mode;
2579 enum built_in_function fallback_fn = BUILT_IN_NONE;
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 gcc_unreachable ();
2584 arg = CALL_EXPR_ARG (exp, 0);
2586 switch (DECL_FUNCTION_CODE (fndecl))
2588 CASE_FLT_FN (BUILT_IN_IRINT):
2589 fallback_fn = BUILT_IN_LRINT;
2590 /* FALLTHRU */
2591 CASE_FLT_FN (BUILT_IN_LRINT):
2592 CASE_FLT_FN (BUILT_IN_LLRINT):
2593 builtin_optab = lrint_optab;
2594 break;
2596 CASE_FLT_FN (BUILT_IN_IROUND):
2597 fallback_fn = BUILT_IN_LROUND;
2598 /* FALLTHRU */
2599 CASE_FLT_FN (BUILT_IN_LROUND):
2600 CASE_FLT_FN (BUILT_IN_LLROUND):
2601 builtin_optab = lround_optab;
2602 break;
2604 default:
2605 gcc_unreachable ();
2608 /* There's no easy way to detect the case we need to set EDOM. */
2609 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2610 return NULL_RTX;
2612 /* Make a suitable register to place result in. */
2613 mode = TYPE_MODE (TREE_TYPE (exp));
2615 /* There's no easy way to detect the case we need to set EDOM. */
2616 if (!flag_errno_math)
2618 rtx result = gen_reg_rtx (mode);
2620 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2621 need to expand the argument again. This way, we will not perform
2622 side-effects more than once. */
2623 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2625 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2627 start_sequence ();
2629 if (expand_sfix_optab (result, op0, builtin_optab))
2631 /* Output the entire sequence. */
2632 insns = get_insns ();
2633 end_sequence ();
2634 emit_insn (insns);
2635 return result;
2638 /* If we were unable to expand via the builtin, stop the sequence
2639 (without outputting the insns) and call the library function
2640 with the stabilized argument list. */
2641 end_sequence ();
2644 if (fallback_fn != BUILT_IN_NONE)
2646 /* Fall back to rounding to long int. Use implicit_p 0; for non-C99
2647 targets, (int) round (x) should never be transformed into
2648 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2649 a call to lround in the hope that the target provides at least some
2650 C99 functions. This should result in the best user experience on
2651 targets without full C99 support. */
2652 tree fallback_fndecl = mathfn_built_in_1
2653 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2655 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2656 fallback_fndecl, 1, arg);
2658 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2659 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2660 return convert_to_mode (mode, target, 0);
2663 return expand_call (exp, target, target == const0_rtx);
2666 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2667 a normal call should be emitted rather than expanding the function
2668 in-line. EXP is the expression that is a call to the builtin
2669 function; if convenient, the result should be placed in TARGET. */
2671 static rtx
2672 expand_builtin_powi (tree exp, rtx target)
2674 tree arg0, arg1;
2675 rtx op0, op1;
2676 machine_mode mode;
2677 machine_mode mode2;
2679 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2680 return NULL_RTX;
2682 arg0 = CALL_EXPR_ARG (exp, 0);
2683 arg1 = CALL_EXPR_ARG (exp, 1);
2684 mode = TYPE_MODE (TREE_TYPE (exp));
2686 /* Emit a libcall to libgcc. */
2688 /* Mode of the 2nd argument must match that of an int. */
2689 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2691 if (target == NULL_RTX)
2692 target = gen_reg_rtx (mode);
2694 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2695 if (GET_MODE (op0) != mode)
2696 op0 = convert_to_mode (mode, op0, 0);
2697 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2698 if (GET_MODE (op1) != mode2)
2699 op1 = convert_to_mode (mode2, op1, 0);
2701 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2702 target, LCT_CONST, mode, 2,
2703 op0, mode, op1, mode2);
2705 return target;
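/* Source-level sketch: __builtin_powi always becomes a libcall into
   libgcc; for double the libfunc behind powi_optab is conventionally
   __powidf2 (an assumption about the usual libgcc naming), so

     double y = __builtin_powi (x, 5);

   expands to roughly y = __powidf2 (x, 5), with the exponent passed
   in the int-sized mode computed above.  */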
2708 /* Expand expression EXP, which is a call to the strlen builtin. Return
2709 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2710 try to get the result in TARGET, if convenient. */
2712 static rtx
2713 expand_builtin_strlen (tree exp, rtx target,
2714 machine_mode target_mode)
2716 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2717 return NULL_RTX;
2718 else
2720 struct expand_operand ops[4];
2721 rtx pat;
2722 tree len;
2723 tree src = CALL_EXPR_ARG (exp, 0);
2724 rtx src_reg;
2725 rtx_insn *before_strlen;
2726 machine_mode insn_mode = target_mode;
2727 enum insn_code icode = CODE_FOR_nothing;
2728 unsigned int align;
2730 /* If the length can be computed at compile-time, return it. */
2731 len = c_strlen (src, 0);
2732 if (len)
2733 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2735 /* If the length can be computed at compile-time and is a constant
2736 integer, but there are side-effects in SRC, evaluate
2737 src for side-effects, then return len.
2738 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2739 can be optimized into: i++; x = 3; */
2740 len = c_strlen (src, 1);
2741 if (len && TREE_CODE (len) == INTEGER_CST)
2743 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2744 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2747 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2749 /* If SRC is not a pointer type, don't do this operation inline. */
2750 if (align == 0)
2751 return NULL_RTX;
2753 /* Bail out if we can't compute strlen in the right mode. */
2754 while (insn_mode != VOIDmode)
2756 icode = optab_handler (strlen_optab, insn_mode);
2757 if (icode != CODE_FOR_nothing)
2758 break;
2760 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2762 if (insn_mode == VOIDmode)
2763 return NULL_RTX;
2765 /* Make a place to hold the source address. We will not expand
2766 the actual source until we are sure that the expansion will
2767 not fail -- there are trees that cannot be expanded twice. */
2768 src_reg = gen_reg_rtx (Pmode);
2770 /* Mark the beginning of the strlen sequence so we can emit the
2771 source operand later. */
2772 before_strlen = get_last_insn ();
2774 create_output_operand (&ops[0], target, insn_mode);
2775 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2776 create_integer_operand (&ops[2], 0);
2777 create_integer_operand (&ops[3], align);
2778 if (!maybe_expand_insn (icode, 4, ops))
2779 return NULL_RTX;
2781 /* Now that we are assured of success, expand the source. */
2782 start_sequence ();
2783 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2784 if (pat != src_reg)
2786 #ifdef POINTERS_EXTEND_UNSIGNED
2787 if (GET_MODE (pat) != Pmode)
2788 pat = convert_to_mode (Pmode, pat,
2789 POINTERS_EXTEND_UNSIGNED);
2790 #endif
2791 emit_move_insn (src_reg, pat);
2793 pat = get_insns ();
2794 end_sequence ();
2796 if (before_strlen)
2797 emit_insn_after (pat, before_strlen);
2798 else
2799 emit_insn_before (pat, get_insns ());
2801 /* Return the value in the proper mode for this function. */
2802 if (GET_MODE (ops[0].value) == target_mode)
2803 target = ops[0].value;
2804 else if (target != 0)
2805 convert_move (target, ops[0].value, 0);
2806 else
2807 target = convert_to_mode (target_mode, ops[0].value, 0);
2809 return target;
2813 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2814 bytes from constant string DATA + OFFSET and return it as a target
2815 constant. */
2817 static rtx
2818 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2819 machine_mode mode)
2821 const char *str = (const char *) data;
2823 gcc_assert (offset >= 0
2824 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2825 <= strlen (str) + 1));
2827 return c_readstr (str + offset, mode);
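/* Callback sketch: for DATA == "hello" and a 2-byte integer MODE, the
   valid offsets are 0, 2 and 4, yielding constants encoding "he",
   "ll" and "o\0" respectively, which are exactly the immediates
   store_by_pieces wants to emit.  The assert above permits reading
   the terminating NUL but nothing past it.  */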
2830 /* LEN specifies the length of the block of the memcpy/memset operation.
2831 Figure out its range and store it in MIN_SIZE/MAX_SIZE.
2832 In some cases we can make a very likely guess about the maximum size,
2833 which we then store in PROBABLE_MAX_SIZE. */
2835 static void
2836 determine_block_size (tree len, rtx len_rtx,
2837 unsigned HOST_WIDE_INT *min_size,
2838 unsigned HOST_WIDE_INT *max_size,
2839 unsigned HOST_WIDE_INT *probable_max_size)
2841 if (CONST_INT_P (len_rtx))
2843 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2844 return;
2846 else
2848 wide_int min, max;
2849 enum value_range_type range_type = VR_UNDEFINED;
2851 /* Determine bounds from the type. */
2852 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2853 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2854 else
2855 *min_size = 0;
2856 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2857 *probable_max_size = *max_size
2858 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2859 else
2860 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2862 if (TREE_CODE (len) == SSA_NAME)
2863 range_type = get_range_info (len, &min, &max);
2864 if (range_type == VR_RANGE)
2866 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2867 *min_size = min.to_uhwi ();
2868 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2869 *probable_max_size = *max_size = max.to_uhwi ();
2871 else if (range_type == VR_ANTI_RANGE)
2873 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2874 if (min == 0)
2876 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2877 *min_size = max.to_uhwi () + 1;
2879 /* Code like
2881 int n;
2882 if (n < 100)
2883 memcpy (a, b, n)
2885 produces an anti-range that allows negative values of N. We can
2886 still use that information and guess that N is not negative. */
2888 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2889 *probable_max_size = min.to_uhwi () - 1;
2892 gcc_checking_assert (*max_size <=
2893 (unsigned HOST_WIDE_INT)
2894 GET_MODE_MASK (GET_MODE (len_rtx)));
2897 /* Helper function to do the actual work for expand_builtin_memcpy. */
2899 static rtx
2900 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2902 const char *src_str;
2903 unsigned int src_align = get_pointer_alignment (src);
2904 unsigned int dest_align = get_pointer_alignment (dest);
2905 rtx dest_mem, src_mem, dest_addr, len_rtx;
2906 HOST_WIDE_INT expected_size = -1;
2907 unsigned int expected_align = 0;
2908 unsigned HOST_WIDE_INT min_size;
2909 unsigned HOST_WIDE_INT max_size;
2910 unsigned HOST_WIDE_INT probable_max_size;
2912 /* If DEST is not a pointer type, call the normal function. */
2913 if (dest_align == 0)
2914 return NULL_RTX;
2916 /* If SRC is not a pointer type, don't do this
2917 operation in-line. */
2918 if (src_align == 0)
2919 return NULL_RTX;
2921 if (currently_expanding_gimple_stmt)
2922 stringop_block_profile (currently_expanding_gimple_stmt,
2923 &expected_align, &expected_size);
2925 if (expected_align < dest_align)
2926 expected_align = dest_align;
2927 dest_mem = get_memory_rtx (dest, len);
2928 set_mem_align (dest_mem, dest_align);
2929 len_rtx = expand_normal (len);
2930 determine_block_size (len, len_rtx, &min_size, &max_size,
2931 &probable_max_size);
2932 src_str = c_getstr (src);
2934 /* If SRC is a string constant and block move would be done
2935 by pieces, we can avoid loading the string from memory
2936 and only store the computed constants. */
2937 if (src_str
2938 && CONST_INT_P (len_rtx)
2939 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2940 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2941 CONST_CAST (char *, src_str),
2942 dest_align, false))
2944 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2945 builtin_memcpy_read_str,
2946 CONST_CAST (char *, src_str),
2947 dest_align, false, 0);
2948 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2949 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2950 return dest_mem;
2953 src_mem = get_memory_rtx (src, len);
2954 set_mem_align (src_mem, src_align);
2956 /* Copy word part most expediently. */
2957 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2958 CALL_EXPR_TAILCALL (exp)
2959 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2960 expected_align, expected_size,
2961 min_size, max_size, probable_max_size);
2963 if (dest_addr == 0)
2965 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2966 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2969 return dest_addr;
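/* Source-level sketch of the by-pieces shortcut above: for

     char buf[4];
     memcpy (buf, "hi!", 4);

   the source string need not be materialized in memory at all; the
   four bytes are emitted as immediate stores via store_by_pieces, and
   the returned value is buf's address converted to ptr_mode.  */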
2972 /* Expand a call EXP to the memcpy builtin.
2973 Return NULL_RTX if we failed; the caller should emit a normal call,
2974 otherwise try to get the result in TARGET, if convenient (and in
2975 mode MODE if that's convenient). */
2977 static rtx
2978 expand_builtin_memcpy (tree exp, rtx target)
2980 if (!validate_arglist (exp,
2981 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2982 return NULL_RTX;
2983 else
2985 tree dest = CALL_EXPR_ARG (exp, 0);
2986 tree src = CALL_EXPR_ARG (exp, 1);
2987 tree len = CALL_EXPR_ARG (exp, 2);
2988 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2992 /* Expand an instrumented call EXP to the memcpy builtin.
2993 Return NULL_RTX if we failed; the caller should emit a normal call,
2994 otherwise try to get the result in TARGET, if convenient (and in
2995 mode MODE if that's convenient). */
2997 static rtx
2998 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3000 if (!validate_arglist (exp,
3001 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3002 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3003 INTEGER_TYPE, VOID_TYPE))
3004 return NULL_RTX;
3005 else
3007 tree dest = CALL_EXPR_ARG (exp, 0);
3008 tree src = CALL_EXPR_ARG (exp, 2);
3009 tree len = CALL_EXPR_ARG (exp, 4);
3010 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3012 /* Return src bounds with the result. */
3013 if (res)
3015 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3016 expand_normal (CALL_EXPR_ARG (exp, 1)));
3017 res = chkp_join_splitted_slot (res, bnd);
3019 return res;
3023 /* Expand a call EXP to the mempcpy builtin.
3024 Return NULL_RTX if we failed; the caller should emit a normal call,
3025 otherwise try to get the result in TARGET, if convenient (and in
3026 mode MODE if that's convenient). If ENDP is 0 return the
3027 destination pointer, if ENDP is 1 return the end pointer ala
3028 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3029 stpcpy. */
3031 static rtx
3032 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3034 if (!validate_arglist (exp,
3035 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3036 return NULL_RTX;
3037 else
3039 tree dest = CALL_EXPR_ARG (exp, 0);
3040 tree src = CALL_EXPR_ARG (exp, 1);
3041 tree len = CALL_EXPR_ARG (exp, 2);
3042 return expand_builtin_mempcpy_args (dest, src, len,
3043 target, mode, /*endp=*/ 1,
3044 exp);
3048 /* Expand an instrumented call EXP to the mempcpy builtin.
3049 Return NULL_RTX if we failed; the caller should emit a normal call,
3050 otherwise try to get the result in TARGET, if convenient (and in
3051 mode MODE if that's convenient). */
3053 static rtx
3054 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3056 if (!validate_arglist (exp,
3057 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3058 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3059 INTEGER_TYPE, VOID_TYPE))
3060 return NULL_RTX;
3061 else
3063 tree dest = CALL_EXPR_ARG (exp, 0);
3064 tree src = CALL_EXPR_ARG (exp, 2);
3065 tree len = CALL_EXPR_ARG (exp, 4);
3066 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3067 mode, 1, exp);
3069 /* Return src bounds with the result. */
3070 if (res)
3072 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3073 expand_normal (CALL_EXPR_ARG (exp, 1)));
3074 res = chkp_join_splitted_slot (res, bnd);
3076 return res;
3080 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3081 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3082 so that this can also be called without constructing an actual CALL_EXPR.
3083 The other arguments and return value are the same as for
3084 expand_builtin_mempcpy. */
3086 static rtx
3087 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3088 rtx target, machine_mode mode, int endp,
3089 tree orig_exp)
3091 tree fndecl = get_callee_fndecl (orig_exp);
3093 /* If the return value is ignored, transform mempcpy into memcpy. */
3094 if (target == const0_rtx
3095 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3096 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3098 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3099 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3100 dest, src, len);
3101 return expand_expr (result, target, mode, EXPAND_NORMAL);
3103 else if (target == const0_rtx
3104 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3106 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3107 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3108 dest, src, len);
3109 return expand_expr (result, target, mode, EXPAND_NORMAL);
3111 else
3113 const char *src_str;
3114 unsigned int src_align = get_pointer_alignment (src);
3115 unsigned int dest_align = get_pointer_alignment (dest);
3116 rtx dest_mem, src_mem, len_rtx;
3118 /* If either SRC or DEST is not a pointer type, don't do this
3119 operation in-line. */
3120 if (dest_align == 0 || src_align == 0)
3121 return NULL_RTX;
3123 /* If LEN is not constant, call the normal function. */
3124 if (! tree_fits_uhwi_p (len))
3125 return NULL_RTX;
3127 len_rtx = expand_normal (len);
3128 src_str = c_getstr (src);
3130 /* If SRC is a string constant and block move would be done
3131 by pieces, we can avoid loading the string from memory
3132 and only store the computed constants. */
3133 if (src_str
3134 && CONST_INT_P (len_rtx)
3135 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3136 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3137 CONST_CAST (char *, src_str),
3138 dest_align, false))
3140 dest_mem = get_memory_rtx (dest, len);
3141 set_mem_align (dest_mem, dest_align);
3142 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3143 builtin_memcpy_read_str,
3144 CONST_CAST (char *, src_str),
3145 dest_align, false, endp);
3146 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3147 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3148 return dest_mem;
3151 if (CONST_INT_P (len_rtx)
3152 && can_move_by_pieces (INTVAL (len_rtx),
3153 MIN (dest_align, src_align)))
3155 dest_mem = get_memory_rtx (dest, len);
3156 set_mem_align (dest_mem, dest_align);
3157 src_mem = get_memory_rtx (src, len);
3158 set_mem_align (src_mem, src_align);
3159 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3160 MIN (dest_align, src_align), endp);
3161 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3162 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3163 return dest_mem;
3166 return NULL_RTX;
3170 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3171 we failed; the caller should emit a normal call, otherwise try to
3172 get the result in TARGET, if convenient. If ENDP is 0 return the
3173 destination pointer, if ENDP is 1 return the end pointer ala
3174 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3175 stpcpy. */
3177 static rtx
3178 expand_movstr (tree dest, tree src, rtx target, int endp)
3180 struct expand_operand ops[3];
3181 rtx dest_mem;
3182 rtx src_mem;
3184 if (!targetm.have_movstr ())
3185 return NULL_RTX;
3187 dest_mem = get_memory_rtx (dest, NULL);
3188 src_mem = get_memory_rtx (src, NULL);
3189 if (!endp)
3191 target = force_reg (Pmode, XEXP (dest_mem, 0));
3192 dest_mem = replace_equiv_address (dest_mem, target);
3195 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3196 create_fixed_operand (&ops[1], dest_mem);
3197 create_fixed_operand (&ops[2], src_mem);
3198 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3199 return NULL_RTX;
3201 if (endp && target != const0_rtx)
3203 target = ops[0].value;
3204 /* movstr is supposed to set end to the address of the NUL
3205 terminator. If the caller requested a mempcpy-like return value,
3206 adjust it. */
3207 if (endp == 1)
3209 rtx tem = plus_constant (GET_MODE (target),
3210 gen_lowpart (GET_MODE (target), target), 1);
3211 emit_move_insn (target, force_operand (tem, NULL_RTX));
3214 return target;
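/* Sketch of the ENDP adjustment above: movstr leaves the result
   pointing at the NUL terminator, which is the stpcpy (ENDP == 2)
   convention.  A mempcpy-style caller (ENDP == 1) expects one past
   the NUL, hence the plus_constant of 1.  Copying "ab" into d:

     stpcpy-style  result:  d + 2   (address of the NUL)
     mempcpy-style result:  d + 3   (one past the NUL)  */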
3217 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3218 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3219 try to get the result in TARGET, if convenient (and in mode MODE if that's
3220 convenient). */
3222 static rtx
3223 expand_builtin_strcpy (tree exp, rtx target)
3225 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3227 tree dest = CALL_EXPR_ARG (exp, 0);
3228 tree src = CALL_EXPR_ARG (exp, 1);
3229 return expand_builtin_strcpy_args (dest, src, target);
3231 return NULL_RTX;
3234 /* Helper function to do the actual work for expand_builtin_strcpy. The
3235 arguments to the builtin_strcpy call DEST and SRC are broken out
3236 so that this can also be called without constructing an actual CALL_EXPR.
3237 The other arguments and return value are the same as for
3238 expand_builtin_strcpy. */
3240 static rtx
3241 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3243 return expand_movstr (dest, src, target, /*endp=*/0);
3246 /* Expand a call EXP to the stpcpy builtin.
3247 Return NULL_RTX if we failed; the caller should emit a normal call,
3248 otherwise try to get the result in TARGET, if convenient (and in
3249 mode MODE if that's convenient). */
3251 static rtx
3252 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3254 tree dst, src;
3255 location_t loc = EXPR_LOCATION (exp);
3257 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3258 return NULL_RTX;
3260 dst = CALL_EXPR_ARG (exp, 0);
3261 src = CALL_EXPR_ARG (exp, 1);
3263 /* If the return value is ignored, transform stpcpy into strcpy. */
3264 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3266 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3267 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3268 return expand_expr (result, target, mode, EXPAND_NORMAL);
3270 else
3272 tree len, lenp1;
3273 rtx ret;
3275 /* Ensure we get an actual string whose length can be evaluated at
3276 compile-time, not an expression containing a string. This is
3277 because the latter will potentially produce pessimized code
3278 when used to produce the return value. */
3279 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3280 return expand_movstr (dst, src, target, /*endp=*/2);
3282 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3283 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3284 target, mode, /*endp=*/2,
3285 exp);
3287 if (ret)
3288 return ret;
3290 if (TREE_CODE (len) == INTEGER_CST)
3292 rtx len_rtx = expand_normal (len);
3294 if (CONST_INT_P (len_rtx))
3296 ret = expand_builtin_strcpy_args (dst, src, target);
3298 if (ret)
3300 if (! target)
3302 if (mode != VOIDmode)
3303 target = gen_reg_rtx (mode);
3304 else
3305 target = gen_reg_rtx (GET_MODE (ret));
3307 if (GET_MODE (target) != GET_MODE (ret))
3308 ret = gen_lowpart (GET_MODE (target), ret);
3310 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3311 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3312 gcc_assert (ret);
3314 return target;
3319 return expand_movstr (dst, src, target, /*endp=*/2);
3323 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3324 bytes from constant string DATA + OFFSET and return it as a target
3325 constant. */
3327 static rtx
3328 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3329 machine_mode mode)
3331 const char *str = (const char *) data;
3333 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3334 return const0_rtx;
3336 return c_readstr (str + offset, mode);
3339 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3340 NULL_RTX if we failed; the caller should emit a normal call. */
3342 static rtx
3343 expand_builtin_strncpy (tree exp, rtx target)
3345 location_t loc = EXPR_LOCATION (exp);
3347 if (validate_arglist (exp,
3348 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3350 tree dest = CALL_EXPR_ARG (exp, 0);
3351 tree src = CALL_EXPR_ARG (exp, 1);
3352 tree len = CALL_EXPR_ARG (exp, 2);
3353 tree slen = c_strlen (src, 1);
3355 /* We must be passed constant LEN and SRC parameters. */
3356 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3357 return NULL_RTX;
3359 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3361 /* We're required to pad with trailing zeros if the requested
3362 len is greater than strlen(s2)+1. In that case try to
3363 use store_by_pieces; if that fails, punt. */
3364 if (tree_int_cst_lt (slen, len))
3366 unsigned int dest_align = get_pointer_alignment (dest);
3367 const char *p = c_getstr (src);
3368 rtx dest_mem;
3370 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3371 || !can_store_by_pieces (tree_to_uhwi (len),
3372 builtin_strncpy_read_str,
3373 CONST_CAST (char *, p),
3374 dest_align, false))
3375 return NULL_RTX;
3377 dest_mem = get_memory_rtx (dest, len);
3378 store_by_pieces (dest_mem, tree_to_uhwi (len),
3379 builtin_strncpy_read_str,
3380 CONST_CAST (char *, p), dest_align, false, 0);
3381 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3382 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3383 return dest_mem;
3386 return NULL_RTX;
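/* Source-level sketch of the padding case handled above: with a
   constant source and length,

     char buf[8];
     strncpy (buf, "ab", 8);

   LEN (8) exceeds strlen (SRC) + 1 (3), so the expansion stores 'a',
   'b' and six NUL bytes; builtin_strncpy_read_str below supplies
   zero words once OFFSET passes the end of the string.  */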
3389 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3390 bytes from constant string DATA + OFFSET and return it as a target
3391 constant. */
3393 static rtx
3394 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3395 machine_mode mode)
3397 const char *c = (const char *) data;
3398 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3400 memset (p, *c, GET_MODE_SIZE (mode));
3402 return c_readstr (p, mode);
3405 /* Callback routine for store_by_pieces. Return the RTL of a register
3406 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3407 char value given in the RTL register data. For example, if mode is
3408 4 bytes wide, return the RTL for 0x01010101*data. */
3410 static rtx
3411 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3412 machine_mode mode)
3414 rtx target, coeff;
3415 size_t size;
3416 char *p;
3418 size = GET_MODE_SIZE (mode);
3419 if (size == 1)
3420 return (rtx) data;
3422 p = XALLOCAVEC (char, size);
3423 memset (p, 1, size);
3424 coeff = c_readstr (p, mode);
3426 target = convert_to_mode (mode, (rtx) data, 1);
3427 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3428 return force_reg (mode, target);
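/* Numeric sketch of the multiply trick above: for a 4-byte MODE the
   coefficient read from "\1\1\1\1" is 0x01010101, so a runtime fill
   byte of 0xAB becomes

     0xAB * 0x01010101 == 0xABABABAB

   i.e. the byte replicated across the whole mode in one multiply.  */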
3431 /* Expand expression EXP, which is a call to the memset builtin. Return
3432 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3433 try to get the result in TARGET, if convenient (and in mode MODE if that's
3434 convenient). */
3436 static rtx
3437 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3439 if (!validate_arglist (exp,
3440 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3441 return NULL_RTX;
3442 else
3444 tree dest = CALL_EXPR_ARG (exp, 0);
3445 tree val = CALL_EXPR_ARG (exp, 1);
3446 tree len = CALL_EXPR_ARG (exp, 2);
3447 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3451 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3452 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3453 try to get the result in TARGET, if convenient (and in mode MODE if that's
3454 convenient). */
3456 static rtx
3457 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3459 if (!validate_arglist (exp,
3460 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3461 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3462 return NULL_RTX;
3463 else
3465 tree dest = CALL_EXPR_ARG (exp, 0);
3466 tree val = CALL_EXPR_ARG (exp, 2);
3467 tree len = CALL_EXPR_ARG (exp, 3);
3468 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3470 /* Return src bounds with the result. */
3471 if (res)
3473 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3474 expand_normal (CALL_EXPR_ARG (exp, 1)));
3475 res = chkp_join_splitted_slot (res, bnd);
3477 return res;
3481 /* Helper function to do the actual work for expand_builtin_memset. The
3482 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3483 so that this can also be called without constructing an actual CALL_EXPR.
3484 The other arguments and return value are the same as for
3485 expand_builtin_memset. */
3487 static rtx
3488 expand_builtin_memset_args (tree dest, tree val, tree len,
3489 rtx target, machine_mode mode, tree orig_exp)
3491 tree fndecl, fn;
3492 enum built_in_function fcode;
3493 machine_mode val_mode;
3494 char c;
3495 unsigned int dest_align;
3496 rtx dest_mem, dest_addr, len_rtx;
3497 HOST_WIDE_INT expected_size = -1;
3498 unsigned int expected_align = 0;
3499 unsigned HOST_WIDE_INT min_size;
3500 unsigned HOST_WIDE_INT max_size;
3501 unsigned HOST_WIDE_INT probable_max_size;
3503 dest_align = get_pointer_alignment (dest);
3505 /* If DEST is not a pointer type, don't do this operation in-line. */
3506 if (dest_align == 0)
3507 return NULL_RTX;
3509 if (currently_expanding_gimple_stmt)
3510 stringop_block_profile (currently_expanding_gimple_stmt,
3511 &expected_align, &expected_size);
3513 if (expected_align < dest_align)
3514 expected_align = dest_align;
3516 /* If the LEN parameter is zero, return DEST. */
3517 if (integer_zerop (len))
3519 /* Evaluate and ignore VAL in case it has side-effects. */
3520 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3521 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3524 /* Stabilize the arguments in case we fail. */
3525 dest = builtin_save_expr (dest);
3526 val = builtin_save_expr (val);
3527 len = builtin_save_expr (len);
3529 len_rtx = expand_normal (len);
3530 determine_block_size (len, len_rtx, &min_size, &max_size,
3531 &probable_max_size);
3532 dest_mem = get_memory_rtx (dest, len);
3533 val_mode = TYPE_MODE (unsigned_char_type_node);
3535 if (TREE_CODE (val) != INTEGER_CST)
3537 rtx val_rtx;
3539 val_rtx = expand_normal (val);
3540 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3542 /* Assume that we can memset by pieces if we can store
3543 the coefficients by pieces (in the required modes).
3544 We can't pass builtin_memset_gen_str as that emits RTL. */
3545 c = 1;
3546 if (tree_fits_uhwi_p (len)
3547 && can_store_by_pieces (tree_to_uhwi (len),
3548 builtin_memset_read_str, &c, dest_align,
3549 true))
3551 val_rtx = force_reg (val_mode, val_rtx);
3552 store_by_pieces (dest_mem, tree_to_uhwi (len),
3553 builtin_memset_gen_str, val_rtx, dest_align,
3554 true, 0);
3556 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3557 dest_align, expected_align,
3558 expected_size, min_size, max_size,
3559 probable_max_size))
3560 goto do_libcall;
3562 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3567 if (target_char_cast (val, &c))
3568 goto do_libcall;
3570 if (c)
3572 if (tree_fits_uhwi_p (len)
3573 && can_store_by_pieces (tree_to_uhwi (len),
3574 builtin_memset_read_str, &c, dest_align,
3575 true))
3576 store_by_pieces (dest_mem, tree_to_uhwi (len),
3577 builtin_memset_read_str, &c, dest_align, true, 0);
3578 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3579 gen_int_mode (c, val_mode),
3580 dest_align, expected_align,
3581 expected_size, min_size, max_size,
3582 probable_max_size))
3583 goto do_libcall;
3585 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3586 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3587 return dest_mem;
3590 set_mem_align (dest_mem, dest_align);
3591 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3592 CALL_EXPR_TAILCALL (orig_exp)
3593 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3594 expected_align, expected_size,
3595 min_size, max_size,
3596 probable_max_size);
3598 if (dest_addr == 0)
3600 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3601 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3604 return dest_addr;
3606 do_libcall:
3607 fndecl = get_callee_fndecl (orig_exp);
3608 fcode = DECL_FUNCTION_CODE (fndecl);
3609 if (fcode == BUILT_IN_MEMSET
3610 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3611 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3612 dest, val, len);
3613 else if (fcode == BUILT_IN_BZERO)
3614 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3615 dest, len);
3616 else
3617 gcc_unreachable ();
3618 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3619 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3620 return expand_call (fn, target, target == const0_rtx);
3623 /* Expand expression EXP, which is a call to the bzero builtin. Return
3624 NULL_RTX if we failed; the caller should emit a normal call. */
3626 static rtx
3627 expand_builtin_bzero (tree exp)
3629 tree dest, size;
3630 location_t loc = EXPR_LOCATION (exp);
3632 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3633 return NULL_RTX;
3635 dest = CALL_EXPR_ARG (exp, 0);
3636 size = CALL_EXPR_ARG (exp, 1);
3638 /* New argument list transforming bzero(ptr x, int y) to
3639 memset(ptr x, int 0, size_t y). This is done this way
3640 so that if it isn't expanded inline, we fall back to
3641 calling bzero instead of memset. */
3643 return expand_builtin_memset_args (dest, integer_zero_node,
3644 fold_convert_loc (loc,
3645 size_type_node, size),
3646 const0_rtx, VOIDmode, exp);
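/* Editorial sketch (hypothetical plain C): the transformation above makes
   bzero (p, n) expand exactly like memset (p, 0, n), while a failed
   inline expansion still falls back to the original bzero call.  */
#if 0
void
bzero_via_memset_example (void *p, unsigned long n)
{
  __builtin_memset (p, 0, n);
}
#endif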
3649 /* Try to expand cmpstr operation ICODE with the given operands.
3650 Return the result rtx on success, otherwise return null. */
3652 static rtx
3653 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3654 HOST_WIDE_INT align)
3656 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3658 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3659 target = NULL_RTX;
3661 struct expand_operand ops[4];
3662 create_output_operand (&ops[0], target, insn_mode);
3663 create_fixed_operand (&ops[1], arg1_rtx);
3664 create_fixed_operand (&ops[2], arg2_rtx);
3665 create_integer_operand (&ops[3], align);
3666 if (maybe_expand_insn (icode, 4, ops))
3667 return ops[0].value;
3668 return NULL_RTX;
3671 /* Expand expression EXP, which is a call to the memcmp built-in function.
3672 Return NULL_RTX if we failed and the caller should emit a normal call,
3673 otherwise try to get the result in TARGET, if convenient.
3674 RESULT_EQ is true if we can relax the returned value to be either zero
3675 or nonzero, without caring about the sign. */
3677 static rtx
3678 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3680 if (!validate_arglist (exp,
3681 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3682 return NULL_RTX;
3684 tree arg1 = CALL_EXPR_ARG (exp, 0);
3685 tree arg2 = CALL_EXPR_ARG (exp, 1);
3686 tree len = CALL_EXPR_ARG (exp, 2);
3687 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3688 location_t loc = EXPR_LOCATION (exp);
3690 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3691 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3693 /* If we don't have POINTER_TYPE, call the function. */
3694 if (arg1_align == 0 || arg2_align == 0)
3695 return NULL_RTX;
3697 rtx arg1_rtx = get_memory_rtx (arg1, len);
3698 rtx arg2_rtx = get_memory_rtx (arg2, len);
3699 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3701 /* Set MEM_SIZE as appropriate. */
3702 if (CONST_INT_P (len_rtx))
3704 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3705 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3708 by_pieces_constfn constfn = NULL;
3710 const char *src_str = c_getstr (arg1);
3711 if (src_str == NULL)
3712 src_str = c_getstr (arg2);
3713 else
3714 std::swap (arg1_rtx, arg2_rtx);
3716 /* If SRC is a string constant and block move would be done
3717 by pieces, we can avoid loading the string from memory
3718 and only store the computed constants. */
3719 if (src_str
3720 && CONST_INT_P (len_rtx)
3721 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3722 constfn = builtin_memcpy_read_str;
3724 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3725 TREE_TYPE (len), target,
3726 result_eq, constfn,
3727 CONST_CAST (char *, src_str));
3729 if (result)
3731 /* Return the value in the proper mode for this function. */
3732 if (GET_MODE (result) == mode)
3733 return result;
3735 if (target != 0)
3737 convert_move (target, result, 0);
3738 return target;
3741 return convert_to_mode (mode, result, 0);
3744 return NULL_RTX;
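/* Editorial sketch (hypothetical plain C): RESULT_EQ applies to callers
   like this one, where only equality is tested, so any nonzero return
   value is acceptable and a cheaper block comparison may be emitted.  */
#if 0
int
buffers_equal_example (const void *a, const void *b, unsigned long n)
{
  return __builtin_memcmp (a, b, n) == 0;
}
#endif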
3747 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3748 if we failed; the caller should emit a normal call.  Otherwise try to get
3749 the result in TARGET, if convenient. */
3751 static rtx
3752 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3755 return NULL_RTX;
3757 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3758 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3759 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3761 rtx arg1_rtx, arg2_rtx;
3762 tree fndecl, fn;
3763 tree arg1 = CALL_EXPR_ARG (exp, 0);
3764 tree arg2 = CALL_EXPR_ARG (exp, 1);
3765 rtx result = NULL_RTX;
3767 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3768 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3770 /* If we don't have POINTER_TYPE, call the function. */
3771 if (arg1_align == 0 || arg2_align == 0)
3772 return NULL_RTX;
3774 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3775 arg1 = builtin_save_expr (arg1);
3776 arg2 = builtin_save_expr (arg2);
3778 arg1_rtx = get_memory_rtx (arg1, NULL);
3779 arg2_rtx = get_memory_rtx (arg2, NULL);
3781 /* Try to call cmpstrsi. */
3782 if (cmpstr_icode != CODE_FOR_nothing)
3783 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3784 MIN (arg1_align, arg2_align));
3786 /* Try to determine at least one length and call cmpstrnsi. */
3787 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3789 tree len;
3790 rtx arg3_rtx;
3792 tree len1 = c_strlen (arg1, 1);
3793 tree len2 = c_strlen (arg2, 1);
3795 if (len1)
3796 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3797 if (len2)
3798 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3800 /* If we don't have a constant length for the first, use the length
3801 of the second, if we know it. We don't require a constant for
3802 this case; some cost analysis could be done if both are available
3803 but neither is constant. For now, assume they're equally cheap,
3804 unless one has side effects. If both strings have constant lengths,
3805 use the smaller. */
3807 if (!len1)
3808 len = len2;
3809 else if (!len2)
3810 len = len1;
3811 else if (TREE_SIDE_EFFECTS (len1))
3812 len = len2;
3813 else if (TREE_SIDE_EFFECTS (len2))
3814 len = len1;
3815 else if (TREE_CODE (len1) != INTEGER_CST)
3816 len = len2;
3817 else if (TREE_CODE (len2) != INTEGER_CST)
3818 len = len1;
3819 else if (tree_int_cst_lt (len1, len2))
3820 len = len1;
3821 else
3822 len = len2;
3824 /* If both arguments have side effects, we cannot optimize. */
3825 if (len && !TREE_SIDE_EFFECTS (len))
3827 arg3_rtx = expand_normal (len);
3828 result = expand_cmpstrn_or_cmpmem
3829 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3830 arg3_rtx, MIN (arg1_align, arg2_align));
3834 if (result)
3836 /* Return the value in the proper mode for this function. */
3837 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3838 if (GET_MODE (result) == mode)
3839 return result;
3840 if (target == 0)
3841 return convert_to_mode (mode, result, 0);
3842 convert_move (target, result, 0);
3843 return target;
3846 /* Expand the library call ourselves using a stabilized argument
3847 list to avoid evaluating the function's arguments twice. */
3848 fndecl = get_callee_fndecl (exp);
3849 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3852 return expand_call (fn, target, target == const0_rtx);
3854 return NULL_RTX;
3857 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3858 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
3859 the result in TARGET, if convenient. */
3861 static rtx
3862 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3863 ATTRIBUTE_UNUSED machine_mode mode)
3865 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3867 if (!validate_arglist (exp,
3868 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3869 return NULL_RTX;
3871 /* If c_strlen can determine an expression for one of the string
3872 lengths, and it doesn't have side effects, then emit cmpstrnsi
3873 using length MIN(strlen(string)+1, arg3). */
3874 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3875 if (cmpstrn_icode != CODE_FOR_nothing)
3877 tree len, len1, len2;
3878 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3879 rtx result;
3880 tree fndecl, fn;
3881 tree arg1 = CALL_EXPR_ARG (exp, 0);
3882 tree arg2 = CALL_EXPR_ARG (exp, 1);
3883 tree arg3 = CALL_EXPR_ARG (exp, 2);
3885 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3886 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3888 len1 = c_strlen (arg1, 1);
3889 len2 = c_strlen (arg2, 1);
3891 if (len1)
3892 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3893 if (len2)
3894 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3896 /* If we don't have a constant length for the first, use the length
3897 of the second, if we know it. We don't require a constant for
3898 this case; some cost analysis could be done if both are available
3899 but neither is constant. For now, assume they're equally cheap,
3900 unless one has side effects. If both strings have constant lengths,
3901 use the smaller. */
3903 if (!len1)
3904 len = len2;
3905 else if (!len2)
3906 len = len1;
3907 else if (TREE_SIDE_EFFECTS (len1))
3908 len = len2;
3909 else if (TREE_SIDE_EFFECTS (len2))
3910 len = len1;
3911 else if (TREE_CODE (len1) != INTEGER_CST)
3912 len = len2;
3913 else if (TREE_CODE (len2) != INTEGER_CST)
3914 len = len1;
3915 else if (tree_int_cst_lt (len1, len2))
3916 len = len1;
3917 else
3918 len = len2;
3920 /* If both arguments have side effects, we cannot optimize. */
3921 if (!len || TREE_SIDE_EFFECTS (len))
3922 return NULL_RTX;
3924 /* The actual new length parameter is MIN(len,arg3). */
3925 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3926 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3932 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935 len = builtin_save_expr (len);
3937 arg1_rtx = get_memory_rtx (arg1, len);
3938 arg2_rtx = get_memory_rtx (arg2, len);
3939 arg3_rtx = expand_normal (len);
3940 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3941 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3942 MIN (arg1_align, arg2_align));
3943 if (result)
3945 /* Return the value in the proper mode for this function. */
3946 mode = TYPE_MODE (TREE_TYPE (exp));
3947 if (GET_MODE (result) == mode)
3948 return result;
3949 if (target == 0)
3950 return convert_to_mode (mode, result, 0);
3951 convert_move (target, result, 0);
3952 return target;
3955 /* Expand the library call ourselves using a stabilized argument
3956 list to avoid evaluating the function's arguments twice. */
3957 fndecl = get_callee_fndecl (exp);
3958 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3959 arg1, arg2, len);
3960 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3961 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3962 return expand_call (fn, target, target == const0_rtx);
3964 return NULL_RTX;
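/* Editorial worked example (hypothetical): for strncmp (s, "hi", 16) the
   constant length strlen ("hi") + 1 == 3 is known, so cmpstrnsi is
   emitted with the clamped length MIN (3, 16) == 3.  */
#if 0
int
strncmp_example (const char *s)
{
  return __builtin_strncmp (s, "hi", 16);
}
#endif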
3967 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3968 if that's convenient. */
3970 rtx
3971 expand_builtin_saveregs (void)
3973 rtx val;
3974 rtx_insn *seq;
3976 /* Don't do __builtin_saveregs more than once in a function.
3977 Save the result of the first call and reuse it. */
3978 if (saveregs_value != 0)
3979 return saveregs_value;
3981 /* When this function is called, it means that registers must be
3982 saved on entry to this function. So we migrate the call to the
3983 first insn of this function. */
3985 start_sequence ();
3987 /* Do whatever the machine needs done in this case. */
3988 val = targetm.calls.expand_builtin_saveregs ();
3990 seq = get_insns ();
3991 end_sequence ();
3993 saveregs_value = val;
3995 /* Put the insns after the NOTE that starts the function. If this
3996 is inside a start_sequence, make the outer-level insn chain current, so
3997 the code is placed at the start of the function. */
3998 push_topmost_sequence ();
3999 emit_insn_after (seq, entry_of_function ());
4000 pop_topmost_sequence ();
4002 return val;
4005 /* Expand a call to __builtin_next_arg. */
4007 static rtx
4008 expand_builtin_next_arg (void)
4010 /* Checking arguments is already done in fold_builtin_next_arg
4011 that must be called before this function. */
4012 return expand_binop (ptr_mode, add_optab,
4013 crtl->args.internal_arg_pointer,
4014 crtl->args.arg_offset_rtx,
4015 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4018 /* Make it easier for the backends by protecting the valist argument
4019 from multiple evaluations. */
4021 static tree
4022 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4024 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4026 /* The current way of determining the type of valist is completely
4027 bogus. We should have the information on the va builtin instead. */
4028 if (!vatype)
4029 vatype = targetm.fn_abi_va_list (cfun->decl);
4031 if (TREE_CODE (vatype) == ARRAY_TYPE)
4033 if (TREE_SIDE_EFFECTS (valist))
4034 valist = save_expr (valist);
4036 /* For this case, the backends will be expecting a pointer to
4037 vatype, but it's possible we've actually been given an array
4038 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4039 So fix it. */
4040 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4042 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4043 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4046 else
4048 tree pt = build_pointer_type (vatype);
4050 if (! needs_lvalue)
4052 if (! TREE_SIDE_EFFECTS (valist))
4053 return valist;
4055 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4056 TREE_SIDE_EFFECTS (valist) = 1;
4059 if (TREE_SIDE_EFFECTS (valist))
4060 valist = save_expr (valist);
4061 valist = fold_build2_loc (loc, MEM_REF,
4062 vatype, valist, build_int_cst (pt, 0));
4065 return valist;
4068 /* The "standard" definition of va_list is void*. */
4070 tree
4071 std_build_builtin_va_list (void)
4073 return ptr_type_node;
4076 /* The "standard" abi va_list is va_list_type_node. */
4078 tree
4079 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4081 return va_list_type_node;
4084 /* The "standard" type of va_list is va_list_type_node. */
4086 tree
4087 std_canonical_va_list_type (tree type)
4089 tree wtype, htype;
4091 if (INDIRECT_REF_P (type))
4092 type = TREE_TYPE (type);
4093 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4094 type = TREE_TYPE (type);
4095 wtype = va_list_type_node;
4096 htype = type;
4097 /* Treat structure va_list types. */
4098 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4099 htype = TREE_TYPE (htype);
4100 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4102 /* If va_list is an array type, the argument may have decayed
4103 to a pointer type, e.g. by being passed to another function.
4104 In that case, unwrap both types so that we can compare the
4105 underlying records. */
4106 if (TREE_CODE (htype) == ARRAY_TYPE
4107 || POINTER_TYPE_P (htype))
4109 wtype = TREE_TYPE (wtype);
4110 htype = TREE_TYPE (htype);
4113 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4114 return va_list_type_node;
4116 return NULL_TREE;
4119 /* The "standard" implementation of va_start: just assign `nextarg' to
4120 the variable. */
4122 void
4123 std_expand_builtin_va_start (tree valist, rtx nextarg)
4125 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4126 convert_move (va_r, nextarg, 0);
4128 /* We do not have any valid bounds for the pointer, so
4129 just store zero bounds for it. */
4130 if (chkp_function_instrumented_p (current_function_decl))
4131 chkp_expand_bounds_reset_for_mem (valist,
4132 make_tree (TREE_TYPE (valist),
4133 nextarg));
4136 /* Expand EXP, a call to __builtin_va_start. */
4138 static rtx
4139 expand_builtin_va_start (tree exp)
4141 rtx nextarg;
4142 tree valist;
4143 location_t loc = EXPR_LOCATION (exp);
4145 if (call_expr_nargs (exp) < 2)
4147 error_at (loc, "too few arguments to function %<va_start%>");
4148 return const0_rtx;
4151 if (fold_builtin_next_arg (exp, true))
4152 return const0_rtx;
4154 nextarg = expand_builtin_next_arg ();
4155 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4157 if (targetm.expand_builtin_va_start)
4158 targetm.expand_builtin_va_start (valist, nextarg);
4159 else
4160 std_expand_builtin_va_start (valist, nextarg);
4162 return const0_rtx;
4165 /* Expand EXP, a call to __builtin_va_end. */
4167 static rtx
4168 expand_builtin_va_end (tree exp)
4170 tree valist = CALL_EXPR_ARG (exp, 0);
4172 /* Evaluate for side effects, if needed. I hate macros that don't
4173 do that. */
4174 if (TREE_SIDE_EFFECTS (valist))
4175 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4177 return const0_rtx;
4180 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4181 builtin rather than just as an assignment in stdarg.h because of the
4182 nastiness of array-type va_list types. */
4184 static rtx
4185 expand_builtin_va_copy (tree exp)
4187 tree dst, src, t;
4188 location_t loc = EXPR_LOCATION (exp);
4190 dst = CALL_EXPR_ARG (exp, 0);
4191 src = CALL_EXPR_ARG (exp, 1);
4193 dst = stabilize_va_list_loc (loc, dst, 1);
4194 src = stabilize_va_list_loc (loc, src, 0);
4196 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4198 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4200 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4201 TREE_SIDE_EFFECTS (t) = 1;
4202 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4204 else
4206 rtx dstb, srcb, size;
4208 /* Evaluate to pointers. */
4209 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4210 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4211 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4212 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4214 dstb = convert_memory_address (Pmode, dstb);
4215 srcb = convert_memory_address (Pmode, srcb);
4217 /* "Dereference" to BLKmode memories. */
4218 dstb = gen_rtx_MEM (BLKmode, dstb);
4219 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4220 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4221 srcb = gen_rtx_MEM (BLKmode, srcb);
4222 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4223 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4225 /* Copy. */
4226 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4229 return const0_rtx;
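/* Editorial sketch (hypothetical plain C): user-level view of the two
   strategies above.  For a scalar va_list the copy is one assignment;
   for an array-type va_list the underlying record is block-copied,
   which is what the BLKmode emit_block_move models.  */
#if 0
#include <stdarg.h>
void
va_copy_example (va_list *dst, va_list *src)
{
  va_copy (*dst, *src);	/* Assignment or block copy, per the ABI.  */
}
#endif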
4232 /* Expand a call to one of the builtin functions __builtin_frame_address or
4233 __builtin_return_address. */
4235 static rtx
4236 expand_builtin_frame_address (tree fndecl, tree exp)
4238 /* The argument must be a nonnegative integer constant.
4239 It counts the number of frames to scan up the stack.
4240 The value is either the frame pointer value or the return
4241 address saved in that frame. */
4242 if (call_expr_nargs (exp) == 0)
4243 /* Warning about missing arg was already issued. */
4244 return const0_rtx;
4245 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4247 error ("invalid argument to %qD", fndecl);
4248 return const0_rtx;
4250 else
4252 /* Number of frames to scan up the stack. */
4253 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4255 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4257 /* Some ports cannot access arbitrary stack frames. */
4258 if (tem == NULL)
4260 warning (0, "unsupported argument to %qD", fndecl);
4261 return const0_rtx;
4264 if (count)
4266 /* Warn since no effort is made to ensure that any frame
4267 beyond the current one exists or can be safely reached. */
4268 warning (OPT_Wframe_address, "calling %qD with "
4269 "a nonzero argument is unsafe", fndecl);
4272 /* For __builtin_frame_address, return what we've got. */
4273 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4274 return tem;
4276 if (!REG_P (tem)
4277 && ! CONSTANT_P (tem))
4278 tem = copy_addr_to_reg (tem);
4279 return tem;
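/* Editorial usage sketch (hypothetical): a zero count asks for the
   current frame and is always safe; nonzero counts trigger the
   -Wframe-address warning emitted above.  */
#if 0
void *
current_return_address_example (void)
{
  return __builtin_return_address (0);
}
#endif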
4283 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4284 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4285 is the same as for allocate_dynamic_stack_space. */
4287 static rtx
4288 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4290 rtx op0;
4291 rtx result;
4292 bool valid_arglist;
4293 unsigned int align;
4294 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4295 == BUILT_IN_ALLOCA_WITH_ALIGN);
4297 valid_arglist
4298 = (alloca_with_align
4299 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4300 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4302 if (!valid_arglist)
4303 return NULL_RTX;
4305 /* Compute the argument. */
4306 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4308 /* Compute the alignment. */
4309 align = (alloca_with_align
4310 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4311 : BIGGEST_ALIGNMENT);
4313 /* Allocate the desired space. */
4314 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4315 result = convert_memory_address (ptr_mode, result);
4317 return result;
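/* Editorial usage sketch (hypothetical): as handled above, the second
   argument of __builtin_alloca_with_align is taken as an alignment in
   bits (cf. the TREE_INT_CST_LOW vs. BIGGEST_ALIGNMENT choice).  */
#if 0
void
aligned_alloca_example (unsigned long n)
{
  void *p = __builtin_alloca_with_align (n, 128);	/* 16-byte aligned.  */
  __builtin_memset (p, 0, n);
}
#endif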
4320 /* Expand a call to bswap builtin in EXP.
4321 Return NULL_RTX if a normal call should be emitted rather than expanding the
4322 function in-line. If convenient, the result should be placed in TARGET.
4323 SUBTARGET may be used as the target for computing one of EXP's operands. */
4325 static rtx
4326 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4327 rtx subtarget)
4329 tree arg;
4330 rtx op0;
4332 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4333 return NULL_RTX;
4335 arg = CALL_EXPR_ARG (exp, 0);
4336 op0 = expand_expr (arg,
4337 subtarget && GET_MODE (subtarget) == target_mode
4338 ? subtarget : NULL_RTX,
4339 target_mode, EXPAND_NORMAL);
4340 if (GET_MODE (op0) != target_mode)
4341 op0 = convert_to_mode (target_mode, op0, 1);
4343 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4345 gcc_assert (target);
4347 return convert_to_mode (target_mode, target, 1);
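/* Editorial sketch (hypothetical plain C): the semantics the bswap
   optab implements, e.g. __builtin_bswap32 (0x11223344) == 0x44332211.  */
#if 0
unsigned int
bswap32_reference (unsigned int x)
{
  return ((x & 0x000000ffu) << 24) | ((x & 0x0000ff00u) << 8)
	 | ((x & 0x00ff0000u) >> 8) | ((x & 0xff000000u) >> 24);
}
#endif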
4350 /* Expand a call to a unary builtin in EXP.
4351 Return NULL_RTX if a normal call should be emitted rather than expanding the
4352 function in-line. If convenient, the result should be placed in TARGET.
4353 SUBTARGET may be used as the target for computing one of EXP's operands. */
4355 static rtx
4356 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4357 rtx subtarget, optab op_optab)
4359 rtx op0;
4361 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4362 return NULL_RTX;
4364 /* Compute the argument. */
4365 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4366 (subtarget
4367 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4368 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4369 VOIDmode, EXPAND_NORMAL);
4370 /* Compute op, into TARGET if possible.
4371 Set TARGET to wherever the result comes back. */
4372 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4373 op_optab, op0, target, op_optab != clrsb_optab);
4374 gcc_assert (target);
4376 return convert_to_mode (target_mode, target, 0);
4379 /* Expand a call to __builtin_expect. We just return our argument
4380 as the builtin_expect semantics should already have been handled by the
4381 tree branch prediction pass. */
4383 static rtx
4384 expand_builtin_expect (tree exp, rtx target)
4386 tree arg;
4388 if (call_expr_nargs (exp) < 2)
4389 return const0_rtx;
4390 arg = CALL_EXPR_ARG (exp, 0);
4392 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4393 /* When guessing was done, the hints should be already stripped away. */
4394 gcc_assert (!flag_guess_branch_prob
4395 || optimize == 0 || seen_error ());
4396 return target;
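/* Editorial usage sketch (hypothetical): by expansion time the hint has
   been consumed by branch prediction, so only the first argument
   survives, as in the classic wrappers below.  */
#if 0
#define LIKELY(x)   __builtin_expect (!!(x), 1)
#define UNLIKELY(x) __builtin_expect (!!(x), 0)
#endif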
4399 /* Expand a call to __builtin_assume_aligned. We just return our first
4400 argument, as the builtin_assume_aligned semantics should already have
4401 been handled by CCP. */
4403 static rtx
4404 expand_builtin_assume_aligned (tree exp, rtx target)
4406 if (call_expr_nargs (exp) < 2)
4407 return const0_rtx;
4408 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4409 EXPAND_NORMAL);
4410 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4411 && (call_expr_nargs (exp) < 3
4412 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4413 return target;
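/* Editorial usage sketch (hypothetical): CCP has already exploited the
   alignment fact, so expansion just forwards the pointer argument.  */
#if 0
void *
assume_aligned_example (void *p)
{
  return __builtin_assume_aligned (p, 16);
}
#endif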
4416 void
4417 expand_builtin_trap (void)
4419 if (targetm.have_trap ())
4421 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4422 /* For trap insns when not accumulating outgoing args force
4423 REG_ARGS_SIZE note to prevent crossjumping of calls with
4424 different args sizes. */
4425 if (!ACCUMULATE_OUTGOING_ARGS)
4426 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4428 else
4430 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4431 tree call_expr = build_call_expr (fn, 0);
4432 expand_call (call_expr, NULL_RTX, false);
4435 emit_barrier ();
4438 /* Expand a call to __builtin_unreachable. We do nothing except emit
4439 a barrier saying that control flow will not pass here.
4441 It is the responsibility of the program being compiled to ensure
4442 that control flow never reaches __builtin_unreachable. */
4443 static void
4444 expand_builtin_unreachable (void)
4446 emit_barrier ();
4449 /* Expand EXP, a call to fabs, fabsf or fabsl.
4450 Return NULL_RTX if a normal call should be emitted rather than expanding
4451 the function inline. If convenient, the result should be placed
4452 in TARGET. SUBTARGET may be used as the target for computing
4453 the operand. */
4455 static rtx
4456 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4458 machine_mode mode;
4459 tree arg;
4460 rtx op0;
4462 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4463 return NULL_RTX;
4465 arg = CALL_EXPR_ARG (exp, 0);
4466 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4467 mode = TYPE_MODE (TREE_TYPE (arg));
4468 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4469 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4472 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4473 Return NULL if a normal call should be emitted rather than expanding the
4474 function inline. If convenient, the result should be placed in TARGET.
4475 SUBTARGET may be used as the target for computing the operand. */
4477 static rtx
4478 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4480 rtx op0, op1;
4481 tree arg;
4483 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4484 return NULL_RTX;
4486 arg = CALL_EXPR_ARG (exp, 0);
4487 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4489 arg = CALL_EXPR_ARG (exp, 1);
4490 op1 = expand_normal (arg);
4492 return expand_copysign (op0, op1, target);
4495 /* Expand a call to __builtin___clear_cache. */
4497 static rtx
4498 expand_builtin___clear_cache (tree exp)
4500 if (!targetm.code_for_clear_cache)
4502 #ifdef CLEAR_INSN_CACHE
4503 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4504 does something. Just do the default expansion to a call to
4505 __clear_cache(). */
4506 return NULL_RTX;
4507 #else
4508 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4509 does nothing. There is no need to call it. Do nothing. */
4510 return const0_rtx;
4511 #endif /* CLEAR_INSN_CACHE */
4514 /* We have a "clear_cache" insn, and it will handle everything. */
4515 tree begin, end;
4516 rtx begin_rtx, end_rtx;
4518 /* We must not expand to a library call. If we did, any
4519 fallback library function in libgcc that might contain a call to
4520 __builtin___clear_cache() would recurse infinitely. */
4521 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4523 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4524 return const0_rtx;
4527 if (targetm.have_clear_cache ())
4529 struct expand_operand ops[2];
4531 begin = CALL_EXPR_ARG (exp, 0);
4532 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4534 end = CALL_EXPR_ARG (exp, 1);
4535 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4537 create_address_operand (&ops[0], begin_rtx);
4538 create_address_operand (&ops[1], end_rtx);
4539 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4540 return const0_rtx;
4542 return const0_rtx;
4545 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4547 static rtx
4548 round_trampoline_addr (rtx tramp)
4550 rtx temp, addend, mask;
4552 /* If we don't need too much alignment, we'll have been guaranteed
4553 proper alignment by get_trampoline_type. */
4554 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4555 return tramp;
4557 /* Round address up to desired boundary. */
4558 temp = gen_reg_rtx (Pmode);
4559 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4560 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4562 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4563 temp, 0, OPTAB_LIB_WIDEN);
4564 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4565 temp, 0, OPTAB_LIB_WIDEN);
4567 return tramp;
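/* Editorial sketch (hypothetical plain C) of the round-up arithmetic
   above: add ALIGN - 1, then mask with -ALIGN (ALIGN a power of two).  */
#if 0
static unsigned long
round_up_example (unsigned long addr, unsigned long align)
{
  return (addr + align - 1) & -align;
}
#endif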
4570 static rtx
4571 expand_builtin_init_trampoline (tree exp, bool onstack)
4573 tree t_tramp, t_func, t_chain;
4574 rtx m_tramp, r_tramp, r_chain, tmp;
4576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4577 POINTER_TYPE, VOID_TYPE))
4578 return NULL_RTX;
4580 t_tramp = CALL_EXPR_ARG (exp, 0);
4581 t_func = CALL_EXPR_ARG (exp, 1);
4582 t_chain = CALL_EXPR_ARG (exp, 2);
4584 r_tramp = expand_normal (t_tramp);
4585 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4586 MEM_NOTRAP_P (m_tramp) = 1;
4588 /* If ONSTACK, the TRAMP argument should be the address of a field
4589 within the local function's FRAME decl. Either way, let's see if
4590 we can fill in the MEM_ATTRs for this memory. */
4591 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4592 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4594 /* Creator of a heap trampoline is responsible for making sure the
4595 address is aligned to at least STACK_BOUNDARY. Normally malloc
4596 will ensure this anyhow. */
4597 tmp = round_trampoline_addr (r_tramp);
4598 if (tmp != r_tramp)
4600 m_tramp = change_address (m_tramp, BLKmode, tmp);
4601 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4602 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4605 /* The FUNC argument should be the address of the nested function.
4606 Extract the actual function decl to pass to the hook. */
4607 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4608 t_func = TREE_OPERAND (t_func, 0);
4609 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4611 r_chain = expand_normal (t_chain);
4613 /* Generate insns to initialize the trampoline. */
4614 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4616 if (onstack)
4618 trampolines_created = 1;
4620 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4621 "trampoline generated for nested function %qD", t_func);
4624 return const0_rtx;
4627 static rtx
4628 expand_builtin_adjust_trampoline (tree exp)
4630 rtx tramp;
4632 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4633 return NULL_RTX;
4635 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4636 tramp = round_trampoline_addr (tramp);
4637 if (targetm.calls.trampoline_adjust_address)
4638 tramp = targetm.calls.trampoline_adjust_address (tramp);
4640 return tramp;
4643 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4644 function. The function first checks whether the back end provides
4645 an insn to implement signbit for the respective mode. If not, it
4646 checks whether the floating point format of the value is such that
4647 the sign bit can be extracted. If that is not the case, error out.
4648 EXP is the expression that is a call to the builtin function; if
4649 convenient, the result should be placed in TARGET. */
4650 static rtx
4651 expand_builtin_signbit (tree exp, rtx target)
4653 const struct real_format *fmt;
4654 machine_mode fmode, imode, rmode;
4655 tree arg;
4656 int word, bitpos;
4657 enum insn_code icode;
4658 rtx temp;
4659 location_t loc = EXPR_LOCATION (exp);
4661 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4662 return NULL_RTX;
4664 arg = CALL_EXPR_ARG (exp, 0);
4665 fmode = TYPE_MODE (TREE_TYPE (arg));
4666 rmode = TYPE_MODE (TREE_TYPE (exp));
4667 fmt = REAL_MODE_FORMAT (fmode);
4669 arg = builtin_save_expr (arg);
4671 /* Expand the argument, yielding an RTX expression. */
4672 temp = expand_normal (arg);
4674 /* Check if the back end provides an insn that handles signbit for the
4675 argument's mode. */
4676 icode = optab_handler (signbit_optab, fmode);
4677 if (icode != CODE_FOR_nothing)
4679 rtx_insn *last = get_last_insn ();
4680 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4681 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4682 return target;
4683 delete_insns_since (last);
4686 /* For floating point formats without a sign bit, implement signbit
4687 as "ARG < 0.0". */
4688 bitpos = fmt->signbit_ro;
4689 if (bitpos < 0)
4691 /* But we can't do this if the format supports signed zero. */
4692 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4694 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4695 build_real (TREE_TYPE (arg), dconst0));
4696 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4699 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4701 imode = int_mode_for_mode (fmode);
4702 gcc_assert (imode != BLKmode);
4703 temp = gen_lowpart (imode, temp);
4705 else
4707 imode = word_mode;
4708 /* Handle targets with different FP word orders. */
4709 if (FLOAT_WORDS_BIG_ENDIAN)
4710 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4711 else
4712 word = bitpos / BITS_PER_WORD;
4713 temp = operand_subword_force (temp, word, fmode);
4714 bitpos = bitpos % BITS_PER_WORD;
4717 /* Force the intermediate word_mode (or narrower) result into a
4718 register. This avoids attempting to create paradoxical SUBREGs
4719 of floating point modes below. */
4720 temp = force_reg (imode, temp);
4722 /* If the bitpos is within the "result mode" lowpart, the operation
4723 can be implemented with a single bitwise AND.  Otherwise, we need
4724 a right shift and an AND. */
4726 if (bitpos < GET_MODE_BITSIZE (rmode))
4728 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4730 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4731 temp = gen_lowpart (rmode, temp);
4732 temp = expand_binop (rmode, and_optab, temp,
4733 immed_wide_int_const (mask, rmode),
4734 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4736 else
4738 /* Perform a logical right shift to place the signbit in the least
4739 significant bit, then truncate the result to the desired mode
4740 and mask just this bit. */
4741 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4742 temp = gen_lowpart (rmode, temp);
4743 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4744 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4747 return temp;
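/* Editorial sketch (hypothetical plain C, assumes IEEE binary64 with a
   64-bit integer view): the shift-and-mask fallback above, performed on
   the integer image of the value.  */
#if 0
static int
signbit_double_reference (double x)
{
  unsigned long long bits;
  __builtin_memcpy (&bits, &x, sizeof bits);
  return (int) (bits >> 63);	/* Sign bit is the most significant bit.  */
}
#endif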
4750 /* Expand fork or exec calls. TARGET is the desired target of the
4751 call.  EXP is the call.  FN is the
4752 identifier of the actual function.  IGNORE is nonzero if the
4753 value is to be ignored. */
4755 static rtx
4756 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4758 tree id, decl;
4759 tree call;
4761 /* If we are not profiling, just call the function. */
4762 if (!profile_arc_flag)
4763 return NULL_RTX;
4765 /* Otherwise call the wrapper. This should be equivalent for the rest of the
4766 compiler, so the code does not diverge, and the wrapper may run the
4767 code necessary for keeping the profiling sane. */
4769 switch (DECL_FUNCTION_CODE (fn))
4771 case BUILT_IN_FORK:
4772 id = get_identifier ("__gcov_fork");
4773 break;
4775 case BUILT_IN_EXECL:
4776 id = get_identifier ("__gcov_execl");
4777 break;
4779 case BUILT_IN_EXECV:
4780 id = get_identifier ("__gcov_execv");
4781 break;
4783 case BUILT_IN_EXECLP:
4784 id = get_identifier ("__gcov_execlp");
4785 break;
4787 case BUILT_IN_EXECLE:
4788 id = get_identifier ("__gcov_execle");
4789 break;
4791 case BUILT_IN_EXECVP:
4792 id = get_identifier ("__gcov_execvp");
4793 break;
4795 case BUILT_IN_EXECVE:
4796 id = get_identifier ("__gcov_execve");
4797 break;
4799 default:
4800 gcc_unreachable ();
4803 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4804 FUNCTION_DECL, id, TREE_TYPE (fn));
4805 DECL_EXTERNAL (decl) = 1;
4806 TREE_PUBLIC (decl) = 1;
4807 DECL_ARTIFICIAL (decl) = 1;
4808 TREE_NOTHROW (decl) = 1;
4809 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4810 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4811 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4812 return expand_call (call, target, ignore);
4817 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4818 the pointer in these functions is void*, the tree optimizers may remove
4819 casts. The mode computed in expand_builtin isn't reliable either, due
4820 to __sync_bool_compare_and_swap.
4822 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4823 group of builtins. This gives us log2 of the mode size. */
4825 static inline machine_mode
4826 get_builtin_sync_mode (int fcode_diff)
4828 /* The size is not negotiable, so ask not to get BLKmode in return
4829 if the target indicates that a smaller size would be better. */
4830 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
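/* Editorial worked example (hypothetical): within a group ordered
   FOO_1, FOO_2, FOO_4, FOO_8, FOO_16, the FOO_4 member has
   fcode_diff == 2, giving BITS_PER_UNIT << 2 == 32 bits, i.e. SImode
   on typical targets.  */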
4833 /* Expand the memory expression LOC and return the appropriate memory operand
4834 for the builtin_sync operations. */
4836 static rtx
4837 get_builtin_sync_mem (tree loc, machine_mode mode)
4839 rtx addr, mem;
4841 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4842 addr = convert_memory_address (Pmode, addr);
4844 /* Note that we explicitly do not want any alias information for this
4845 memory, so that we kill all other live memories. Otherwise we don't
4846 satisfy the full barrier semantics of the intrinsic. */
4847 mem = validize_mem (gen_rtx_MEM (mode, addr));
4849 /* The alignment must be at least that required by the mode. */
4850 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4851 get_pointer_alignment (loc)));
4852 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4853 MEM_VOLATILE_P (mem) = 1;
4855 return mem;
4858 /* Make sure an argument is in the right mode.
4859 EXP is the tree argument.
4860 MODE is the mode it should be in. */
4862 static rtx
4863 expand_expr_force_mode (tree exp, machine_mode mode)
4865 rtx val;
4866 machine_mode old_mode;
4868 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4869 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4870 of CONST_INTs, where we know the old_mode only from the call argument. */
4872 old_mode = GET_MODE (val);
4873 if (old_mode == VOIDmode)
4874 old_mode = TYPE_MODE (TREE_TYPE (exp));
4875 val = convert_modes (mode, old_mode, val, 1);
4876 return val;
4880 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4881 EXP is the CALL_EXPR. CODE is the rtx code
4882 that corresponds to the arithmetic or logical operation from the name;
4883 an exception here is that NOT actually means NAND. TARGET is an optional
4884 place for us to store the results; AFTER is true if this is the
4885 fetch_and_xxx form. */
4887 static rtx
4888 expand_builtin_sync_operation (machine_mode mode, tree exp,
4889 enum rtx_code code, bool after,
4890 rtx target)
4892 rtx val, mem;
4893 location_t loc = EXPR_LOCATION (exp);
4895 if (code == NOT && warn_sync_nand)
4897 tree fndecl = get_callee_fndecl (exp);
4898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4900 static bool warned_f_a_n, warned_n_a_f;
4902 switch (fcode)
4904 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4905 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4906 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4907 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4908 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4909 if (warned_f_a_n)
4910 break;
4912 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4913 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4914 warned_f_a_n = true;
4915 break;
4917 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4918 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4919 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4920 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4921 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4922 if (warned_n_a_f)
4923 break;
4925 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4926 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4927 warned_n_a_f = true;
4928 break;
4930 default:
4931 gcc_unreachable ();
4935 /* Expand the operands. */
4936 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4937 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4939 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4940 after);
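/* Editorial sketch (hypothetical, deliberately non-atomic): the
   post-GCC-4.4 NAND semantics the warning above refers to.  */
#if 0
unsigned int
fetch_and_nand_reference (unsigned int *p, unsigned int v)
{
  unsigned int old = *p;
  *p = ~(old & v);
  return old;
}
#endif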
4943 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4944 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4945 true if this is the boolean form. TARGET is a place for us to store the
4946 results; this is NOT optional if IS_BOOL is true. */
4948 static rtx
4949 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4950 bool is_bool, rtx target)
4952 rtx old_val, new_val, mem;
4953 rtx *pbool, *poval;
4955 /* Expand the operands. */
4956 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4957 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4958 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4960 pbool = poval = NULL;
4961 if (target != const0_rtx)
4963 if (is_bool)
4964 pbool = &target;
4965 else
4966 poval = &target;
4968 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4969 false, MEMMODEL_SYNC_SEQ_CST,
4970 MEMMODEL_SYNC_SEQ_CST))
4971 return NULL_RTX;
4973 return target;
4976 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4977 general form is actually an atomic exchange, and some targets only
4978 support a reduced form with the second argument being a constant 1.
4979 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4980 the results. */
4982 static rtx
4983 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4984 rtx target)
4986 rtx val, mem;
4988 /* Expand the operands. */
4989 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4990 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4992 return expand_sync_lock_test_and_set (target, mem, val);
4995 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4997 static void
4998 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5000 rtx mem;
5002 /* Expand the operands. */
5003 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5005 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5008 /* Given an integer representing an ``enum memmodel'', verify its
5009 correctness and return the memory model enum. */
5011 static enum memmodel
5012 get_memmodel (tree exp)
5014 rtx op;
5015 unsigned HOST_WIDE_INT val;
5016 source_location loc
5017 = expansion_point_location_if_in_system_header (input_location);
5019 /* If the parameter is not a constant, it's a run time value so we'll just
5020 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5021 if (TREE_CODE (exp) != INTEGER_CST)
5022 return MEMMODEL_SEQ_CST;
5024 op = expand_normal (exp);
5026 val = INTVAL (op);
5027 if (targetm.memmodel_check)
5028 val = targetm.memmodel_check (val);
5029 else if (val & ~MEMMODEL_MASK)
5031 warning_at (loc, OPT_Winvalid_memory_model,
5032 "unknown architecture specifier in memory model to builtin");
5033 return MEMMODEL_SEQ_CST;
5036 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5037 if (memmodel_base (val) >= MEMMODEL_LAST)
5039 warning_at (loc, OPT_Winvalid_memory_model,
5040 "invalid memory model argument to builtin");
5041 return MEMMODEL_SEQ_CST;
5044 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5045 be conservative and promote consume to acquire. */
5046 if (val == MEMMODEL_CONSUME)
5047 val = MEMMODEL_ACQUIRE;
5049 return (enum memmodel) val;
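/* Editorial sketch (hypothetical): the promotions above in terms of the
   user-visible __ATOMIC_* constants -- a non-constant or invalid model
   degrades to __ATOMIC_SEQ_CST, and consume is treated as acquire.  */
#if 0
int
load_consume_example (int *p)
{
  return __atomic_load_n (p, __ATOMIC_CONSUME);	/* Expanded as ACQUIRE.  */
}
#endif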
5052 /* Expand the __atomic_exchange intrinsic:
5053 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5054 EXP is the CALL_EXPR.
5055 TARGET is an optional place for us to store the results. */
5057 static rtx
5058 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5060 rtx val, mem;
5061 enum memmodel model;
5063 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5065 if (!flag_inline_atomics)
5066 return NULL_RTX;
5068 /* Expand the operands. */
5069 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5070 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5072 return expand_atomic_exchange (target, mem, val, model);
5075 /* Expand the __atomic_compare_exchange intrinsic:
5076 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5077 TYPE desired, BOOL weak,
5078 enum memmodel success,
5079 enum memmodel failure)
5080 EXP is the CALL_EXPR.
5081 TARGET is an optional place for us to store the results. */
5083 static rtx
5084 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5085 rtx target)
5087 rtx expect, desired, mem, oldval;
5088 rtx_code_label *label;
5089 enum memmodel success, failure;
5090 tree weak;
5091 bool is_weak;
5092 source_location loc
5093 = expansion_point_location_if_in_system_header (input_location);
5095 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5096 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5098 if (failure > success)
5100 warning_at (loc, OPT_Winvalid_memory_model,
5101 "failure memory model cannot be stronger than success "
5102 "memory model for %<__atomic_compare_exchange%>");
5103 success = MEMMODEL_SEQ_CST;
5106 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5108 warning_at (loc, OPT_Winvalid_memory_model,
5109 "invalid failure memory model for "
5110 "%<__atomic_compare_exchange%>");
5111 failure = MEMMODEL_SEQ_CST;
5112 success = MEMMODEL_SEQ_CST;
5116 if (!flag_inline_atomics)
5117 return NULL_RTX;
5119 /* Expand the operands. */
5120 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5122 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5123 expect = convert_memory_address (Pmode, expect);
5124 expect = gen_rtx_MEM (mode, expect);
5125 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5127 weak = CALL_EXPR_ARG (exp, 3);
5128 is_weak = false;
5129 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5130 is_weak = true;
5132 if (target == const0_rtx)
5133 target = NULL;
5135 /* Lest the rtl backend create a race condition with an improper store
5136 to memory, always create a new pseudo for OLDVAL. */
5137 oldval = NULL;
5139 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5140 is_weak, success, failure))
5141 return NULL_RTX;
5143 /* Conditionally store back to EXPECT, lest we create a race condition
5144 with an improper store to memory. */
5145 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5146 the normal case where EXPECT is totally private, i.e. a register. At
5147 which point the store can be unconditional. */
5148 label = gen_label_rtx ();
5149 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5150 GET_MODE (target), 1, label);
5151 emit_move_insn (expect, oldval);
5152 emit_label (label);
5154 return target;
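/* Editorial sketch (hypothetical, deliberately non-atomic): the
   store-back contract implemented above -- on failure the value read
   from *OBJECT is written back to *EXPECT.  */
#if 0
_Bool
compare_exchange_reference (int *object, int *expect, int desired)
{
  if (*object == *expect)
    {
      *object = desired;
      return 1;
    }
  *expect = *object;
  return 0;
}
#endif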
5157 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5158 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5159 call. The weak parameter must be dropped to match the expected parameter
5160 list and the expected argument changed from value to pointer to memory
5161 slot. */
5163 static void
5164 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5166 unsigned int z;
5167 vec<tree, va_gc> *vec;
5169 vec_alloc (vec, 5);
5170 vec->quick_push (gimple_call_arg (call, 0));
5171 tree expected = gimple_call_arg (call, 1);
5172 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5173 TREE_TYPE (expected));
5174 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5175 if (expd != x)
5176 emit_move_insn (x, expd);
5177 tree v = make_tree (TREE_TYPE (expected), x);
5178 vec->quick_push (build1 (ADDR_EXPR,
5179 build_pointer_type (TREE_TYPE (expected)), v));
5180 vec->quick_push (gimple_call_arg (call, 2));
5181 /* Skip the boolean weak parameter. */
5182 for (z = 4; z < 6; z++)
5183 vec->quick_push (gimple_call_arg (call, z));
5184 built_in_function fncode
5185 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5186 + exact_log2 (GET_MODE_SIZE (mode)));
5187 tree fndecl = builtin_decl_explicit (fncode);
5188 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5189 fndecl);
5190 tree exp = build_call_vec (boolean_type_node, fn, vec);
5191 tree lhs = gimple_call_lhs (call);
5192 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5193 if (lhs)
5195 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5196 if (GET_MODE (boolret) != mode)
5197 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5198 x = force_reg (mode, x);
5199 write_complex_part (target, boolret, true);
5200 write_complex_part (target, x, false);
5204 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5206 void
5207 expand_ifn_atomic_compare_exchange (gcall *call)
5209 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5210 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5211 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5212 rtx expect, desired, mem, oldval, boolret;
5213 enum memmodel success, failure;
5214 tree lhs;
5215 bool is_weak;
5216 source_location loc
5217 = expansion_point_location_if_in_system_header (gimple_location (call));
5219 success = get_memmodel (gimple_call_arg (call, 4));
5220 failure = get_memmodel (gimple_call_arg (call, 5));
5222 if (failure > success)
5224 warning_at (loc, OPT_Winvalid_memory_model,
5225 "failure memory model cannot be stronger than success "
5226 "memory model for %<__atomic_compare_exchange%>");
5227 success = MEMMODEL_SEQ_CST;
5230 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5232 warning_at (loc, OPT_Winvalid_memory_model,
5233 "invalid failure memory model for "
5234 "%<__atomic_compare_exchange%>");
5235 failure = MEMMODEL_SEQ_CST;
5236 success = MEMMODEL_SEQ_CST;
5239 if (!flag_inline_atomics)
5241 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5242 return;
5245 /* Expand the operands. */
5246 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5248 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5249 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5251 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5253 boolret = NULL;
5254 oldval = NULL;
5256 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5257 is_weak, success, failure))
5259 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5260 return;
5263 lhs = gimple_call_lhs (call);
5264 if (lhs)
5266 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5267 if (GET_MODE (boolret) != mode)
5268 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5269 write_complex_part (target, boolret, true);
5270 write_complex_part (target, oldval, false);
5274 /* Expand the __atomic_load intrinsic:
5275 TYPE __atomic_load (TYPE *object, enum memmodel)
5276 EXP is the CALL_EXPR.
5277 TARGET is an optional place for us to store the results. */
5279 static rtx
5280 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5282 rtx mem;
5283 enum memmodel model;
5285 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5286 if (is_mm_release (model) || is_mm_acq_rel (model))
5288 source_location loc
5289 = expansion_point_location_if_in_system_header (input_location);
5290 warning_at (loc, OPT_Winvalid_memory_model,
5291 "invalid memory model for %<__atomic_load%>");
5292 model = MEMMODEL_SEQ_CST;
5295 if (!flag_inline_atomics)
5296 return NULL_RTX;
5298 /* Expand the operand. */
5299 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5301 return expand_atomic_load (target, mem, model);
5305 /* Expand the __atomic_store intrinsic:
5306 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5307 EXP is the CALL_EXPR.
5308 TARGET is an optional place for us to store the results. */
5310 static rtx
5311 expand_builtin_atomic_store (machine_mode mode, tree exp)
5313 rtx mem, val;
5314 enum memmodel model;
5316 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5317 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5318 || is_mm_release (model)))
5320 source_location loc
5321 = expansion_point_location_if_in_system_header (input_location);
5322 warning_at (loc, OPT_Winvalid_memory_model,
5323 "invalid memory model for %<__atomic_store%>");
5324 model = MEMMODEL_SEQ_CST;
5327 if (!flag_inline_atomics)
5328 return NULL_RTX;
5330 /* Expand the operands. */
5331 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5332 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5334 return expand_atomic_store (mem, val, model, false);
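/* Editor's sketch (hypothetical variable FLAG): only RELAXED, RELEASE and
   SEQ_CST pass the check above; anything else warns and is expanded as
   SEQ_CST.  */
static void
store_release (unsigned *flag, unsigned v)
{
  __atomic_store_n (flag, v, __ATOMIC_RELEASE);		/* OK */
  /* __atomic_store_n (flag, v, __ATOMIC_ACQUIRE) would warn.  */
}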
5337 /* Expand the __atomic_fetch_XXX intrinsic:
5338 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5339 EXP is the CALL_EXPR.
5340 TARGET is an optional place for us to store the results.
5341 CODE is the operation, PLUS, MINUS, AND, XOR, IOR or NOT.
5342 FETCH_AFTER is true if returning the result of the operation.
5343 FETCH_AFTER is false if returning the value before the operation.
5344 IGNORE is true if the result is not used.
5345 EXT_CALL is the correct builtin for an external call if this cannot be
5346 resolved to an instruction sequence. */
5348 static rtx
5349 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5350 enum rtx_code code, bool fetch_after,
5351 bool ignore, enum built_in_function ext_call)
5353 rtx val, mem, ret;
5354 enum memmodel model;
5355 tree fndecl;
5356 tree addr;
5358 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5360 /* Expand the operands. */
5361 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5362 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5364 /* Only try generating instructions if inlining is turned on. */
5365 if (flag_inline_atomics)
5367 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5368 if (ret)
5369 return ret;
5372 /* Return if a different routine isn't needed for the library call. */
5373 if (ext_call == BUILT_IN_NONE)
5374 return NULL_RTX;
5376 /* Change the call to the specified function. */
5377 fndecl = get_callee_fndecl (exp);
5378 addr = CALL_EXPR_FN (exp);
5379 STRIP_NOPS (addr);
5381 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5382 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5384 /* Expand the call here so we can emit trailing code. */
5385 ret = expand_call (exp, target, ignore);
5387 /* Replace the original function just in case it matters. */
5388 TREE_OPERAND (addr, 0) = fndecl;
5390 /* Then issue the arithmetic correction to return the right result: the external __atomic_fetch_OP call returned the pre-operation value, so compute the post-operation value from it. */
5391 if (!ignore)
5393 if (code == NOT)
5395 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5396 OPTAB_LIB_WIDEN);
5397 ret = expand_simple_unop (mode, NOT, ret, target, true);
5399 else
5400 ret = expand_simple_binop (mode, code, ret, val, target, true,
5401 OPTAB_LIB_WIDEN);
5403 return ret;
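/* Editor's sketch (hypothetical P and V): the identities the trailing
   correction code above implements when an __atomic_OP_fetch builtin is
   re-routed to the external __atomic_fetch_OP routine, which returns the
   pre-operation value.  */
static unsigned
add_fetch_via_library (unsigned *p, unsigned v)
{
  unsigned pre = __atomic_fetch_add (p, v, __ATOMIC_SEQ_CST);
  return pre + v;		/* == __atomic_add_fetch (p, v, ...) */
}

static unsigned
nand_fetch_via_library (unsigned *p, unsigned v)
{
  unsigned pre = __atomic_fetch_nand (p, v, __ATOMIC_SEQ_CST);
  return ~(pre & v);		/* == __atomic_nand_fetch (p, v, ...) */
}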
5406 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5408 void
5409 expand_ifn_atomic_bit_test_and (gcall *call)
5411 tree ptr = gimple_call_arg (call, 0);
5412 tree bit = gimple_call_arg (call, 1);
5413 tree flag = gimple_call_arg (call, 2);
5414 tree lhs = gimple_call_lhs (call);
5415 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5416 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5417 enum rtx_code code;
5418 optab optab;
5419 struct expand_operand ops[5];
5421 gcc_assert (flag_inline_atomics);
5423 if (gimple_call_num_args (call) == 4)
5424 model = get_memmodel (gimple_call_arg (call, 3));
5426 rtx mem = get_builtin_sync_mem (ptr, mode);
5427 rtx val = expand_expr_force_mode (bit, mode);
5429 switch (gimple_call_internal_fn (call))
5431 case IFN_ATOMIC_BIT_TEST_AND_SET:
5432 code = IOR;
5433 optab = atomic_bit_test_and_set_optab;
5434 break;
5435 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5436 code = XOR;
5437 optab = atomic_bit_test_and_complement_optab;
5438 break;
5439 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5440 code = AND;
5441 optab = atomic_bit_test_and_reset_optab;
5442 break;
5443 default:
5444 gcc_unreachable ();
5447 if (lhs == NULL_TREE)
5449 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5450 val, NULL_RTX, true, OPTAB_DIRECT);
5451 if (code == AND)
5452 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5453 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5454 return;
5457 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5458 enum insn_code icode = direct_optab_handler (optab, mode);
5459 gcc_assert (icode != CODE_FOR_nothing);
5460 create_output_operand (&ops[0], target, mode);
5461 create_fixed_operand (&ops[1], mem);
5462 create_convert_operand_to (&ops[2], val, mode, true);
5463 create_integer_operand (&ops[3], model);
5464 create_integer_operand (&ops[4], integer_onep (flag));
5465 if (maybe_expand_insn (icode, 5, ops))
5466 return;
5468 rtx bitval = val;
5469 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5470 val, NULL_RTX, true, OPTAB_DIRECT);
5471 rtx maskval = val;
5472 if (code == AND)
5473 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5474 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5475 code, model, false);
5476 if (integer_onep (flag))
5478 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5479 NULL_RTX, true, OPTAB_DIRECT);
5480 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5481 true, OPTAB_DIRECT);
5483 else
5484 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5485 OPTAB_DIRECT);
5486 if (result != target)
5487 emit_move_insn (target, result);
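/* Editor's sketch (hypothetical WORD and BIT): the source-level pattern
   that earlier passes turn into IFN_ATOMIC_BIT_TEST_AND_SET, letting the
   expander above emit a single bit-test instruction (e.g. x86 `lock bts')
   instead of a full fetch_or plus mask.  */
static _Bool
test_and_set_bit (unsigned long *word, unsigned bit)
{
  unsigned long mask = 1UL << bit;
  return (__atomic_fetch_or (word, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}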
5490 /* Expand an atomic clear operation.
5491 void __atomic_clear (BOOL *obj, enum memmodel)
5492 EXP is the call expression. */
5494 static rtx
5495 expand_builtin_atomic_clear (tree exp)
5497 machine_mode mode;
5498 rtx mem, ret;
5499 enum memmodel model;
5501 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5502 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5503 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5505 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5507 source_location loc
5508 = expansion_point_location_if_in_system_header (input_location);
5509 warning_at (loc, OPT_Winvalid_memory_model,
5510 "invalid memory model for %<__atomic_store%>");
5511 model = MEMMODEL_SEQ_CST;
5514 /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
5515 pattern. The only way expand_atomic_store can fail is if the bool type
5516 is larger than a word size. Unlikely, but handle it anyway for
5517 completeness: in that case emit a plain store and assume a single
5518 threaded model, since there is no atomic support and no barriers are required. */
5519 ret = expand_atomic_store (mem, const0_rtx, model, true);
5520 if (!ret)
5521 emit_move_insn (mem, const0_rtx);
5522 return const0_rtx;
5525 /* Expand an atomic test_and_set operation.
5526 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5527 EXP is the call expression. */
5529 static rtx
5530 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5532 rtx mem;
5533 enum memmodel model;
5534 machine_mode mode;
5536 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5537 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5538 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5540 return expand_atomic_test_and_set (target, mem, model);
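/* Editor's sketch (hypothetical LOCK_BYTE): the spinlock idiom served by
   the two expanders above.  __atomic_clear accepts only RELAXED, RELEASE
   and SEQ_CST, mirroring the store check.  */
static char lock_byte;

static void
spin_lock (void)
{
  while (__atomic_test_and_set (&lock_byte, __ATOMIC_ACQUIRE))
    ;				/* spin until the previous value was 0 */
}

static void
spin_unlock (void)
{
  __atomic_clear (&lock_byte, __ATOMIC_RELEASE);
}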
5544 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5545 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5547 static tree
5548 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5550 int size;
5551 machine_mode mode;
5552 unsigned int mode_align, type_align;
5554 if (TREE_CODE (arg0) != INTEGER_CST)
5555 return NULL_TREE;
5557 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5558 mode = mode_for_size (size, MODE_INT, 0);
5559 mode_align = GET_MODE_ALIGNMENT (mode);
5561 if (TREE_CODE (arg1) == INTEGER_CST)
5563 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5565 /* Either this argument is null, or it's a fake pointer encoding
5566 the alignment of the object. */
5567 val = val & -val;
5568 val *= BITS_PER_UNIT;
5570 if (val == 0 || mode_align < val)
5571 type_align = mode_align;
5572 else
5573 type_align = val;
5575 else
5577 tree ttype = TREE_TYPE (arg1);
5579 /* This function is usually invoked and folded immediately by the front
5580 end before anything else has a chance to look at it. The pointer
5581 parameter at this point is usually cast to a void *, so check for that
5582 and look past the cast. */
5583 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5584 && VOID_TYPE_P (TREE_TYPE (ttype)))
5585 arg1 = TREE_OPERAND (arg1, 0);
5587 ttype = TREE_TYPE (arg1);
5588 gcc_assert (POINTER_TYPE_P (ttype));
5590 /* Get the underlying type of the object. */
5591 ttype = TREE_TYPE (ttype);
5592 type_align = TYPE_ALIGN (ttype);
5595 /* If the object has smaller alignment, the lock free routines cannot
5596 be used. */
5597 if (type_align < mode_align)
5598 return boolean_false_node;
5600 /* Check if a compare_and_swap pattern exists for the mode which represents
5601 the required size. The pattern is not allowed to fail, so the existence
5602 of the pattern indicates support is present. */
5603 if (can_compare_and_swap_p (mode, true))
5604 return boolean_true_node;
5605 else
5606 return boolean_false_node;
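/* Editor's sketch: the low-bit trick used above.  VAL & -VAL isolates the
   lowest set bit of the "fake pointer", i.e. the largest power-of-two
   alignment that address can guarantee.  With a null second argument the
   query is a compile-time constant, so it can sit in a static assertion
   (hypothetical use shown; the assertion may fail on exotic targets).  */
#include <stdio.h>

int
main (void)
{
  unsigned long val = 24;		/* binary 11000 */
  printf ("%lu\n", val & -val);		/* prints 8 */
  _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
		  "int is expected to be lock-free here");
  return 0;
}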
5609 /* Return true if the parameters to call EXP represent an object which will
5610 always generate lock free instructions. The first argument represents the
5611 size of the object, and the second parameter is a pointer to the object
5612 itself. If NULL is passed for the object, then the result is based on
5613 typical alignment for an object of the specified size. Otherwise return
5614 false. */
5616 static rtx
5617 expand_builtin_atomic_always_lock_free (tree exp)
5619 tree size;
5620 tree arg0 = CALL_EXPR_ARG (exp, 0);
5621 tree arg1 = CALL_EXPR_ARG (exp, 1);
5623 if (TREE_CODE (arg0) != INTEGER_CST)
5625 error ("non-constant argument 1 to %<__atomic_always_lock_free%>");
5626 return const0_rtx;
5629 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5630 if (size == boolean_true_node)
5631 return const1_rtx;
5632 return const0_rtx;
5635 /* Return boolean_true_node if it can be determined that object ARG1 of size
5636 ARG0 is lock free on this architecture; return NULL_TREE otherwise. */
5638 static tree
5639 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5641 if (!flag_inline_atomics)
5642 return NULL_TREE;
5644 /* If it isn't always lock free, don't generate a result. */
5645 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5646 return boolean_true_node;
5648 return NULL_TREE;
5651 /* Return true if the parameters to call EXP represent an object which will
5652 always generate lock free instructions. The first argument represents the
5653 size of the object, and the second parameter is a pointer to the object
5654 itself. If NULL is passed for the object, then the result is based on
5655 typical alignment for an object of the specified size. Otherwise return
5656 NULL. */
5658 static rtx
5659 expand_builtin_atomic_is_lock_free (tree exp)
5661 tree size;
5662 tree arg0 = CALL_EXPR_ARG (exp, 0);
5663 tree arg1 = CALL_EXPR_ARG (exp, 1);
5665 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5667 error ("non-integer argument 1 to %<__atomic_is_lock_free%>");
5668 return NULL_RTX;
5671 if (!flag_inline_atomics)
5672 return NULL_RTX;
5674 /* If the value is known at compile time, return the RTX for it. */
5675 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5676 if (size == boolean_true_node)
5677 return const1_rtx;
5679 return NULL_RTX;
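/* Editor's sketch (hypothetical P): the practical difference between the
   two queries handled above.  The first must fold to a constant; the
   second may survive to a runtime call into libatomic when the answer is
   not known at compile time.  */
static int
lock_free_queries (long long *p)
{
  int a = __atomic_always_lock_free (sizeof (long long), 0);	/* constant */
  int b = __atomic_is_lock_free (sizeof (long long), p);	/* may call */
  return a + b;
}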
5682 /* Expand the __atomic_thread_fence intrinsic:
5683 void __atomic_thread_fence (enum memmodel)
5684 EXP is the CALL_EXPR. */
5686 static void
5687 expand_builtin_atomic_thread_fence (tree exp)
5689 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5690 expand_mem_thread_fence (model);
5693 /* Expand the __atomic_signal_fence intrinsic:
5694 void __atomic_signal_fence (enum memmodel)
5695 EXP is the CALL_EXPR. */
5697 static void
5698 expand_builtin_atomic_signal_fence (tree exp)
5700 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5701 expand_mem_signal_fence (model);
5704 /* Expand the __sync_synchronize intrinsic. */
5706 static void
5707 expand_builtin_sync_synchronize (void)
5709 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
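/* Editor's sketch: the user-level forms the three expanders above service.
   A signal fence only constrains the compiler; a thread fence also emits
   whatever hardware barrier the memory model requires.  */
static void
fence_examples (void)
{
  __atomic_thread_fence (__ATOMIC_ACQ_REL);	/* inter-thread fence */
  __atomic_signal_fence (__ATOMIC_SEQ_CST);	/* compiler barrier only */
  __sync_synchronize ();			/* legacy full barrier */
}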
5712 static rtx
5713 expand_builtin_thread_pointer (tree exp, rtx target)
5715 enum insn_code icode;
5716 if (!validate_arglist (exp, VOID_TYPE))
5717 return const0_rtx;
5718 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5719 if (icode != CODE_FOR_nothing)
5721 struct expand_operand op;
5722 /* If the target is not suitable then create a new target. */
5723 if (target == NULL_RTX
5724 || !REG_P (target)
5725 || GET_MODE (target) != Pmode)
5726 target = gen_reg_rtx (Pmode);
5727 create_output_operand (&op, target, Pmode);
5728 expand_insn (icode, 1, &op);
5729 return target;
5731 error ("%<__builtin_thread_pointer%> is not supported on this target");
5732 return const0_rtx;
5735 static void
5736 expand_builtin_set_thread_pointer (tree exp)
5738 enum insn_code icode;
5739 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5740 return;
5741 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5742 if (icode != CODE_FOR_nothing)
5744 struct expand_operand op;
5745 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5746 Pmode, EXPAND_NORMAL);
5747 create_input_operand (&op, val, Pmode);
5748 expand_insn (icode, 1, &op);
5749 return;
5751 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
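/* Editor's sketch: the user-level view of the optab-backed pair above; on
   targets without the pattern the calls are hard errors, as the
   diagnostics show.  */
static void *
thread_base (void)
{
  return __builtin_thread_pointer ();	/* e.g. the TLS base register */
}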
5755 /* Emit code to restore the current value of stack. */
5757 static void
5758 expand_stack_restore (tree var)
5760 rtx_insn *prev;
5761 rtx sa = expand_normal (var);
5763 sa = convert_memory_address (Pmode, sa);
5765 prev = get_last_insn ();
5766 emit_stack_restore (SAVE_BLOCK, sa);
5768 record_new_stack_level ();
5770 fixup_args_size_notes (prev, get_last_insn (), 0);
5773 /* Emit code to save the current value of stack. */
5775 static rtx
5776 expand_stack_save (void)
5778 rtx ret = NULL_RTX;
5780 emit_stack_save (SAVE_BLOCK, &ret);
5781 return ret;
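/* Editor's sketch (hypothetical function): the construct whose block
   boundaries are bracketed by the two expanders above; the gimplifier
   plants a stack save on entry to the VLA's block and a matching restore
   on exit so the space is reclaimed.  */
static int
vla_scope (int n)
{
  int sum = 0;
  {				/* stack save */
    int buf[n];
    buf[0] = n;
    sum = buf[0];
  }				/* stack restore */
  return sum;
}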
5785 /* Expand an expression EXP that calls a built-in function,
5786 with result going to TARGET if that's convenient
5787 (and in mode MODE if that's convenient).
5788 SUBTARGET may be used as the target for computing one of EXP's operands.
5789 IGNORE is nonzero if the value is to be ignored. */
5791 rtx
5792 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5793 int ignore)
5795 tree fndecl = get_callee_fndecl (exp);
5796 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5797 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5798 int flags;
5800 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5801 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5803 /* When ASan is enabled, we don't want to expand some memory/string
5804 builtins and rely on libsanitizer's hooks. This allows us to avoid
5805 redundant checks and be sure, that possible overflow will be detected
5806 by ASan. */
5808 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5809 return expand_call (exp, target, ignore);
5811 /* When not optimizing, generate calls to library functions for a certain
5812 set of builtins. */
5813 if (!optimize
5814 && !called_as_built_in (fndecl)
5815 && fcode != BUILT_IN_FORK
5816 && fcode != BUILT_IN_EXECL
5817 && fcode != BUILT_IN_EXECV
5818 && fcode != BUILT_IN_EXECLP
5819 && fcode != BUILT_IN_EXECLE
5820 && fcode != BUILT_IN_EXECVP
5821 && fcode != BUILT_IN_EXECVE
5822 && fcode != BUILT_IN_ALLOCA
5823 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5824 && fcode != BUILT_IN_FREE
5825 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5830 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5831 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5832 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5833 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5834 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5835 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5836 && fcode != BUILT_IN_CHKP_BNDRET)
5837 return expand_call (exp, target, ignore);
5839 /* The built-in function expanders test for target == const0_rtx
5840 to determine whether the function's result will be ignored. */
5841 if (ignore)
5842 target = const0_rtx;
5844 /* If the result of a pure or const built-in function is ignored, and
5845 none of its arguments are volatile, we can avoid expanding the
5846 built-in call and just evaluate the arguments for side-effects. */
5847 if (target == const0_rtx
5848 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5849 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5851 bool volatilep = false;
5852 tree arg;
5853 call_expr_arg_iterator iter;
5855 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5856 if (TREE_THIS_VOLATILE (arg))
5858 volatilep = true;
5859 break;
5862 if (! volatilep)
5864 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5865 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5866 return const0_rtx;
5870 /* expand_builtin_with_bounds is supposed to be used for
5871 instrumented builtin calls. */
5872 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5874 switch (fcode)
5876 CASE_FLT_FN (BUILT_IN_FABS):
5877 case BUILT_IN_FABSD32:
5878 case BUILT_IN_FABSD64:
5879 case BUILT_IN_FABSD128:
5880 target = expand_builtin_fabs (exp, target, subtarget);
5881 if (target)
5882 return target;
5883 break;
5885 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5886 target = expand_builtin_copysign (exp, target, subtarget);
5887 if (target)
5888 return target;
5889 break;
5891 /* Just do a normal library call if we were unable to fold
5892 the values. */
5893 CASE_FLT_FN (BUILT_IN_CABS):
5894 break;
5896 CASE_FLT_FN (BUILT_IN_FMA):
5897 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_FLT_FN (BUILT_IN_ILOGB):
5903 if (! flag_unsafe_math_optimizations)
5904 break;
5905 CASE_FLT_FN (BUILT_IN_ISINF):
5906 CASE_FLT_FN (BUILT_IN_FINITE):
5907 case BUILT_IN_ISFINITE:
5908 case BUILT_IN_ISNORMAL:
5909 target = expand_builtin_interclass_mathfn (exp, target);
5910 if (target)
5911 return target;
5912 break;
5914 CASE_FLT_FN (BUILT_IN_ICEIL):
5915 CASE_FLT_FN (BUILT_IN_LCEIL):
5916 CASE_FLT_FN (BUILT_IN_LLCEIL):
5917 CASE_FLT_FN (BUILT_IN_LFLOOR):
5918 CASE_FLT_FN (BUILT_IN_IFLOOR):
5919 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5920 target = expand_builtin_int_roundingfn (exp, target);
5921 if (target)
5922 return target;
5923 break;
5925 CASE_FLT_FN (BUILT_IN_IRINT):
5926 CASE_FLT_FN (BUILT_IN_LRINT):
5927 CASE_FLT_FN (BUILT_IN_LLRINT):
5928 CASE_FLT_FN (BUILT_IN_IROUND):
5929 CASE_FLT_FN (BUILT_IN_LROUND):
5930 CASE_FLT_FN (BUILT_IN_LLROUND):
5931 target = expand_builtin_int_roundingfn_2 (exp, target);
5932 if (target)
5933 return target;
5934 break;
5936 CASE_FLT_FN (BUILT_IN_POWI):
5937 target = expand_builtin_powi (exp, target);
5938 if (target)
5939 return target;
5940 break;
5942 CASE_FLT_FN (BUILT_IN_CEXPI):
5943 target = expand_builtin_cexpi (exp, target);
5944 gcc_assert (target);
5945 return target;
5947 CASE_FLT_FN (BUILT_IN_SIN):
5948 CASE_FLT_FN (BUILT_IN_COS):
5949 if (! flag_unsafe_math_optimizations)
5950 break;
5951 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5952 if (target)
5953 return target;
5954 break;
5956 CASE_FLT_FN (BUILT_IN_SINCOS):
5957 if (! flag_unsafe_math_optimizations)
5958 break;
5959 target = expand_builtin_sincos (exp);
5960 if (target)
5961 return target;
5962 break;
5964 case BUILT_IN_APPLY_ARGS:
5965 return expand_builtin_apply_args ();
5967 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5968 FUNCTION with a copy of the parameters described by
5969 ARGUMENTS, and ARGSIZE. It returns a block of memory
5970 allocated on the stack into which is stored all the registers
5971 that might possibly be used for returning the result of a
5972 function. ARGUMENTS is the value returned by
5973 __builtin_apply_args. ARGSIZE is the number of bytes of
5974 arguments that must be copied. ??? How should this value be
5975 computed? We'll also need a safe worst case value for varargs
5976 functions. */
5977 case BUILT_IN_APPLY:
5978 if (!validate_arglist (exp, POINTER_TYPE,
5979 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5980 && !validate_arglist (exp, REFERENCE_TYPE,
5981 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5982 return const0_rtx;
5983 else
5985 rtx ops[3];
5987 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5988 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5989 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5991 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5994 /* __builtin_return (RESULT) causes the function to return the
5995 value described by RESULT. RESULT is address of the block of
5996 memory returned by __builtin_apply. */
5997 case BUILT_IN_RETURN:
5998 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5999 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6000 return const0_rtx;
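/* Editor's sketch (hypothetical functions; the block size 64 is an
   arbitrary worst-case guess, as the ??? note above concedes): the
   forwarding idiom the three builtins implement together.  */
double target_fn (int a, double b);

static double
forwarder (int a, double b)
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (result);	/* does not fall through */
}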
6002 case BUILT_IN_SAVEREGS:
6003 return expand_builtin_saveregs ();
6005 case BUILT_IN_VA_ARG_PACK:
6006 /* All valid uses of __builtin_va_arg_pack () are removed during
6007 inlining. */
6008 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6009 return const0_rtx;
6011 case BUILT_IN_VA_ARG_PACK_LEN:
6012 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6013 inlining. */
6014 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6015 return const0_rtx;
6017 /* Return the address of the first anonymous stack arg. */
6018 case BUILT_IN_NEXT_ARG:
6019 if (fold_builtin_next_arg (exp, false))
6020 return const0_rtx;
6021 return expand_builtin_next_arg ();
6023 case BUILT_IN_CLEAR_CACHE:
6024 target = expand_builtin___clear_cache (exp);
6025 if (target)
6026 return target;
6027 break;
6029 case BUILT_IN_CLASSIFY_TYPE:
6030 return expand_builtin_classify_type (exp);
6032 case BUILT_IN_CONSTANT_P:
6033 return const0_rtx;
6035 case BUILT_IN_FRAME_ADDRESS:
6036 case BUILT_IN_RETURN_ADDRESS:
6037 return expand_builtin_frame_address (fndecl, exp);
6039 /* Returns the address of the area where the structure is returned.
6040 0 otherwise. */
6041 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6042 if (call_expr_nargs (exp) != 0
6043 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6044 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6045 return const0_rtx;
6046 else
6047 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6049 case BUILT_IN_ALLOCA:
6050 case BUILT_IN_ALLOCA_WITH_ALIGN:
6051 /* If the allocation stems from the declaration of a variable-sized
6052 object, it cannot accumulate. */
6053 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6054 if (target)
6055 return target;
6056 break;
6058 case BUILT_IN_STACK_SAVE:
6059 return expand_stack_save ();
6061 case BUILT_IN_STACK_RESTORE:
6062 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6063 return const0_rtx;
6065 case BUILT_IN_BSWAP16:
6066 case BUILT_IN_BSWAP32:
6067 case BUILT_IN_BSWAP64:
6068 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6069 if (target)
6070 return target;
6071 break;
6073 CASE_INT_FN (BUILT_IN_FFS):
6074 target = expand_builtin_unop (target_mode, exp, target,
6075 subtarget, ffs_optab);
6076 if (target)
6077 return target;
6078 break;
6080 CASE_INT_FN (BUILT_IN_CLZ):
6081 target = expand_builtin_unop (target_mode, exp, target,
6082 subtarget, clz_optab);
6083 if (target)
6084 return target;
6085 break;
6087 CASE_INT_FN (BUILT_IN_CTZ):
6088 target = expand_builtin_unop (target_mode, exp, target,
6089 subtarget, ctz_optab);
6090 if (target)
6091 return target;
6092 break;
6094 CASE_INT_FN (BUILT_IN_CLRSB):
6095 target = expand_builtin_unop (target_mode, exp, target,
6096 subtarget, clrsb_optab);
6097 if (target)
6098 return target;
6099 break;
6101 CASE_INT_FN (BUILT_IN_POPCOUNT):
6102 target = expand_builtin_unop (target_mode, exp, target,
6103 subtarget, popcount_optab);
6104 if (target)
6105 return target;
6106 break;
6108 CASE_INT_FN (BUILT_IN_PARITY):
6109 target = expand_builtin_unop (target_mode, exp, target,
6110 subtarget, parity_optab);
6111 if (target)
6112 return target;
6113 break;
6115 case BUILT_IN_STRLEN:
6116 target = expand_builtin_strlen (exp, target, target_mode);
6117 if (target)
6118 return target;
6119 break;
6121 case BUILT_IN_STRCPY:
6122 target = expand_builtin_strcpy (exp, target);
6123 if (target)
6124 return target;
6125 break;
6127 case BUILT_IN_STRNCPY:
6128 target = expand_builtin_strncpy (exp, target);
6129 if (target)
6130 return target;
6131 break;
6133 case BUILT_IN_STPCPY:
6134 target = expand_builtin_stpcpy (exp, target, mode);
6135 if (target)
6136 return target;
6137 break;
6139 case BUILT_IN_MEMCPY:
6140 target = expand_builtin_memcpy (exp, target);
6141 if (target)
6142 return target;
6143 break;
6145 case BUILT_IN_MEMPCPY:
6146 target = expand_builtin_mempcpy (exp, target, mode);
6147 if (target)
6148 return target;
6149 break;
6151 case BUILT_IN_MEMSET:
6152 target = expand_builtin_memset (exp, target, mode);
6153 if (target)
6154 return target;
6155 break;
6157 case BUILT_IN_BZERO:
6158 target = expand_builtin_bzero (exp);
6159 if (target)
6160 return target;
6161 break;
6163 case BUILT_IN_STRCMP:
6164 target = expand_builtin_strcmp (exp, target);
6165 if (target)
6166 return target;
6167 break;
6169 case BUILT_IN_STRNCMP:
6170 target = expand_builtin_strncmp (exp, target, mode);
6171 if (target)
6172 return target;
6173 break;
6175 case BUILT_IN_BCMP:
6176 case BUILT_IN_MEMCMP:
6177 case BUILT_IN_MEMCMP_EQ:
6178 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6179 if (target)
6180 return target;
6181 if (fcode == BUILT_IN_MEMCMP_EQ)
6183 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6184 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6186 break;
6188 case BUILT_IN_SETJMP:
6189 /* This should have been lowered to the builtins below. */
6190 gcc_unreachable ();
6192 case BUILT_IN_SETJMP_SETUP:
6193 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6194 and the receiver label. */
6195 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6197 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6198 VOIDmode, EXPAND_NORMAL);
6199 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6200 rtx_insn *label_r = label_rtx (label);
6202 /* This is copied from the handling of non-local gotos. */
6203 expand_builtin_setjmp_setup (buf_addr, label_r);
6204 nonlocal_goto_handler_labels
6205 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6206 nonlocal_goto_handler_labels);
6207 /* ??? Do not let expand_label treat us as such since we would
6208 not want to be both on the list of non-local labels and on
6209 the list of forced labels. */
6210 FORCED_LABEL (label) = 0;
6211 return const0_rtx;
6213 break;
6215 case BUILT_IN_SETJMP_RECEIVER:
6216 /* __builtin_setjmp_receiver is passed the receiver label. */
6217 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6219 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6220 rtx_insn *label_r = label_rtx (label);
6222 expand_builtin_setjmp_receiver (label_r);
6223 return const0_rtx;
6225 break;
6227 /* __builtin_longjmp is passed a pointer to an array of five words.
6228 It's similar to the C library longjmp function but works with
6229 __builtin_setjmp above. */
6230 case BUILT_IN_LONGJMP:
6231 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6233 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6234 VOIDmode, EXPAND_NORMAL);
6235 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6237 if (value != const1_rtx)
6239 error ("%<__builtin_longjmp%> second argument must be 1");
6240 return const0_rtx;
6243 expand_builtin_longjmp (buf_addr, value);
6244 return const0_rtx;
6246 break;
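/* Editor's sketch: the restricted pair handled above.  The buffer is five
   words (as the comments note) and, unlike the C library longjmp, the
   second argument must be the literal 1 -- the error just above enforces
   it.  */
#include <stdint.h>

static int
builtin_sj_example (void)
{
  intptr_t buf[5];
  if (__builtin_setjmp (buf))
    return 1;			/* resumed by the longjmp below */
  __builtin_longjmp (buf, 1);	/* second argument must be 1 */
}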
6248 case BUILT_IN_NONLOCAL_GOTO:
6249 target = expand_builtin_nonlocal_goto (exp);
6250 if (target)
6251 return target;
6252 break;
6254 /* This updates the setjmp buffer that is its argument with the value
6255 of the current stack pointer. */
6256 case BUILT_IN_UPDATE_SETJMP_BUF:
6257 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6259 rtx buf_addr
6260 = expand_normal (CALL_EXPR_ARG (exp, 0));
6262 expand_builtin_update_setjmp_buf (buf_addr);
6263 return const0_rtx;
6265 break;
6267 case BUILT_IN_TRAP:
6268 expand_builtin_trap ();
6269 return const0_rtx;
6271 case BUILT_IN_UNREACHABLE:
6272 expand_builtin_unreachable ();
6273 return const0_rtx;
6275 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6276 case BUILT_IN_SIGNBITD32:
6277 case BUILT_IN_SIGNBITD64:
6278 case BUILT_IN_SIGNBITD128:
6279 target = expand_builtin_signbit (exp, target);
6280 if (target)
6281 return target;
6282 break;
6284 /* Various hooks for the DWARF 2 __throw routine. */
6285 case BUILT_IN_UNWIND_INIT:
6286 expand_builtin_unwind_init ();
6287 return const0_rtx;
6288 case BUILT_IN_DWARF_CFA:
6289 return virtual_cfa_rtx;
6290 #ifdef DWARF2_UNWIND_INFO
6291 case BUILT_IN_DWARF_SP_COLUMN:
6292 return expand_builtin_dwarf_sp_column ();
6293 case BUILT_IN_INIT_DWARF_REG_SIZES:
6294 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6295 return const0_rtx;
6296 #endif
6297 case BUILT_IN_FROB_RETURN_ADDR:
6298 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6299 case BUILT_IN_EXTRACT_RETURN_ADDR:
6300 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6301 case BUILT_IN_EH_RETURN:
6302 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6303 CALL_EXPR_ARG (exp, 1));
6304 return const0_rtx;
6305 case BUILT_IN_EH_RETURN_DATA_REGNO:
6306 return expand_builtin_eh_return_data_regno (exp);
6307 case BUILT_IN_EXTEND_POINTER:
6308 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6309 case BUILT_IN_EH_POINTER:
6310 return expand_builtin_eh_pointer (exp);
6311 case BUILT_IN_EH_FILTER:
6312 return expand_builtin_eh_filter (exp);
6313 case BUILT_IN_EH_COPY_VALUES:
6314 return expand_builtin_eh_copy_values (exp);
6316 case BUILT_IN_VA_START:
6317 return expand_builtin_va_start (exp);
6318 case BUILT_IN_VA_END:
6319 return expand_builtin_va_end (exp);
6320 case BUILT_IN_VA_COPY:
6321 return expand_builtin_va_copy (exp);
6322 case BUILT_IN_EXPECT:
6323 return expand_builtin_expect (exp, target);
6324 case BUILT_IN_ASSUME_ALIGNED:
6325 return expand_builtin_assume_aligned (exp, target);
6326 case BUILT_IN_PREFETCH:
6327 expand_builtin_prefetch (exp);
6328 return const0_rtx;
6330 case BUILT_IN_INIT_TRAMPOLINE:
6331 return expand_builtin_init_trampoline (exp, true);
6332 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6333 return expand_builtin_init_trampoline (exp, false);
6334 case BUILT_IN_ADJUST_TRAMPOLINE:
6335 return expand_builtin_adjust_trampoline (exp);
6337 case BUILT_IN_FORK:
6338 case BUILT_IN_EXECL:
6339 case BUILT_IN_EXECV:
6340 case BUILT_IN_EXECLP:
6341 case BUILT_IN_EXECLE:
6342 case BUILT_IN_EXECVP:
6343 case BUILT_IN_EXECVE:
6344 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6345 if (target)
6346 return target;
6347 break;
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6354 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6355 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6356 if (target)
6357 return target;
6358 break;
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6365 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6366 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6367 if (target)
6368 return target;
6369 break;
6371 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6375 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6376 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6377 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6378 if (target)
6379 return target;
6380 break;
6382 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6386 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6387 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6388 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6389 if (target)
6390 return target;
6391 break;
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6398 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6399 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6400 if (target)
6401 return target;
6402 break;
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6409 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6410 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6411 if (target)
6412 return target;
6413 break;
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6420 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6421 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6422 if (target)
6423 return target;
6424 break;
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6431 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6432 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6433 if (target)
6434 return target;
6435 break;
6437 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6441 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6442 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6443 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6444 if (target)
6445 return target;
6446 break;
6448 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6452 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6453 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6454 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6455 if (target)
6456 return target;
6457 break;
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6465 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6466 if (target)
6467 return target;
6468 break;
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6476 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6477 if (target)
6478 return target;
6479 break;
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6486 if (mode == VOIDmode)
6487 mode = TYPE_MODE (boolean_type_node);
6488 if (!target || !register_operand (target, mode))
6489 target = gen_reg_rtx (mode);
6491 mode = get_builtin_sync_mode
6492 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6493 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6494 if (target)
6495 return target;
6496 break;
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6503 mode = get_builtin_sync_mode
6504 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6505 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6506 if (target)
6507 return target;
6508 break;
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6516 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6517 if (target)
6518 return target;
6519 break;
6521 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6525 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6527 expand_builtin_sync_lock_release (mode, exp);
6528 return const0_rtx;
6530 case BUILT_IN_SYNC_SYNCHRONIZE:
6531 expand_builtin_sync_synchronize ();
6532 return const0_rtx;
6534 case BUILT_IN_ATOMIC_EXCHANGE_1:
6535 case BUILT_IN_ATOMIC_EXCHANGE_2:
6536 case BUILT_IN_ATOMIC_EXCHANGE_4:
6537 case BUILT_IN_ATOMIC_EXCHANGE_8:
6538 case BUILT_IN_ATOMIC_EXCHANGE_16:
6539 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6540 target = expand_builtin_atomic_exchange (mode, exp, target);
6541 if (target)
6542 return target;
6543 break;
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6551 unsigned int nargs, z;
6552 vec<tree, va_gc> *vec;
6554 mode =
6555 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6556 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6557 if (target)
6558 return target;
6560 /* If this is turned into an external library call, the weak parameter
6561 must be dropped to match the expected parameter list. */
6562 nargs = call_expr_nargs (exp);
6563 vec_alloc (vec, nargs - 1);
6564 for (z = 0; z < 3; z++)
6565 vec->quick_push (CALL_EXPR_ARG (exp, z));
6566 /* Skip the boolean weak parameter. */
6567 for (z = 4; z < 6; z++)
6568 vec->quick_push (CALL_EXPR_ARG (exp, z));
6569 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6570 break;
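/* Editor's sketch (hypothetical names): the shape of the rewrite done
   above.  The builtin carries a weak flag (argument 3) that the
   five-argument libatomic entry point does not, so the flag is dropped
   before the external call is built.  */
#include <stdbool.h>

static bool
cas_builtin (unsigned *p, unsigned *expected, unsigned desired)
{
  return __atomic_compare_exchange_n (p, expected, desired, /*weak=*/false,
				      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  /* Library form, without the flag:
     __atomic_compare_exchange_4 (p, expected, desired,
				  __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);  */
}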
6573 case BUILT_IN_ATOMIC_LOAD_1:
6574 case BUILT_IN_ATOMIC_LOAD_2:
6575 case BUILT_IN_ATOMIC_LOAD_4:
6576 case BUILT_IN_ATOMIC_LOAD_8:
6577 case BUILT_IN_ATOMIC_LOAD_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6579 target = expand_builtin_atomic_load (mode, exp, target);
6580 if (target)
6581 return target;
6582 break;
6584 case BUILT_IN_ATOMIC_STORE_1:
6585 case BUILT_IN_ATOMIC_STORE_2:
6586 case BUILT_IN_ATOMIC_STORE_4:
6587 case BUILT_IN_ATOMIC_STORE_8:
6588 case BUILT_IN_ATOMIC_STORE_16:
6589 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6590 target = expand_builtin_atomic_store (mode, exp);
6591 if (target)
6592 return const0_rtx;
6593 break;
6595 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6599 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6601 enum built_in_function lib;
6602 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6603 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6604 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6605 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6606 ignore, lib);
6607 if (target)
6608 return target;
6609 break;
6611 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6615 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6617 enum built_in_function lib;
6618 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6619 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6620 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6621 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6622 ignore, lib);
6623 if (target)
6624 return target;
6625 break;
6627 case BUILT_IN_ATOMIC_AND_FETCH_1:
6628 case BUILT_IN_ATOMIC_AND_FETCH_2:
6629 case BUILT_IN_ATOMIC_AND_FETCH_4:
6630 case BUILT_IN_ATOMIC_AND_FETCH_8:
6631 case BUILT_IN_ATOMIC_AND_FETCH_16:
6633 enum built_in_function lib;
6634 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6635 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6636 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6637 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6638 ignore, lib);
6639 if (target)
6640 return target;
6641 break;
6643 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6647 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6649 enum built_in_function lib;
6650 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6651 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6652 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6653 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6654 ignore, lib);
6655 if (target)
6656 return target;
6657 break;
6659 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6663 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6665 enum built_in_function lib;
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6667 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6668 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6669 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6670 ignore, lib);
6671 if (target)
6672 return target;
6673 break;
6675 case BUILT_IN_ATOMIC_OR_FETCH_1:
6676 case BUILT_IN_ATOMIC_OR_FETCH_2:
6677 case BUILT_IN_ATOMIC_OR_FETCH_4:
6678 case BUILT_IN_ATOMIC_OR_FETCH_8:
6679 case BUILT_IN_ATOMIC_OR_FETCH_16:
6681 enum built_in_function lib;
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6683 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6684 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6685 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6686 ignore, lib);
6687 if (target)
6688 return target;
6689 break;
6691 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6695 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6697 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6698 ignore, BUILT_IN_NONE);
6699 if (target)
6700 return target;
6701 break;
6703 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6707 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6708 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6709 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6710 ignore, BUILT_IN_NONE);
6711 if (target)
6712 return target;
6713 break;
6715 case BUILT_IN_ATOMIC_FETCH_AND_1:
6716 case BUILT_IN_ATOMIC_FETCH_AND_2:
6717 case BUILT_IN_ATOMIC_FETCH_AND_4:
6718 case BUILT_IN_ATOMIC_FETCH_AND_8:
6719 case BUILT_IN_ATOMIC_FETCH_AND_16:
6720 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6721 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6722 ignore, BUILT_IN_NONE);
6723 if (target)
6724 return target;
6725 break;
6727 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6731 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6732 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6733 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6734 ignore, BUILT_IN_NONE);
6735 if (target)
6736 return target;
6737 break;
6739 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6743 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6745 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6746 ignore, BUILT_IN_NONE);
6747 if (target)
6748 return target;
6749 break;
6751 case BUILT_IN_ATOMIC_FETCH_OR_1:
6752 case BUILT_IN_ATOMIC_FETCH_OR_2:
6753 case BUILT_IN_ATOMIC_FETCH_OR_4:
6754 case BUILT_IN_ATOMIC_FETCH_OR_8:
6755 case BUILT_IN_ATOMIC_FETCH_OR_16:
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6757 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6758 ignore, BUILT_IN_NONE);
6759 if (target)
6760 return target;
6761 break;
6763 case BUILT_IN_ATOMIC_TEST_AND_SET:
6764 return expand_builtin_atomic_test_and_set (exp, target);
6766 case BUILT_IN_ATOMIC_CLEAR:
6767 return expand_builtin_atomic_clear (exp);
6769 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6770 return expand_builtin_atomic_always_lock_free (exp);
6772 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6773 target = expand_builtin_atomic_is_lock_free (exp);
6774 if (target)
6775 return target;
6776 break;
6778 case BUILT_IN_ATOMIC_THREAD_FENCE:
6779 expand_builtin_atomic_thread_fence (exp);
6780 return const0_rtx;
6782 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6783 expand_builtin_atomic_signal_fence (exp);
6784 return const0_rtx;
6786 case BUILT_IN_OBJECT_SIZE:
6787 return expand_builtin_object_size (exp);
6789 case BUILT_IN_MEMCPY_CHK:
6790 case BUILT_IN_MEMPCPY_CHK:
6791 case BUILT_IN_MEMMOVE_CHK:
6792 case BUILT_IN_MEMSET_CHK:
6793 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6794 if (target)
6795 return target;
6796 break;
6798 case BUILT_IN_STRCPY_CHK:
6799 case BUILT_IN_STPCPY_CHK:
6800 case BUILT_IN_STRNCPY_CHK:
6801 case BUILT_IN_STPNCPY_CHK:
6802 case BUILT_IN_STRCAT_CHK:
6803 case BUILT_IN_STRNCAT_CHK:
6804 case BUILT_IN_SNPRINTF_CHK:
6805 case BUILT_IN_VSNPRINTF_CHK:
6806 maybe_emit_chk_warning (exp, fcode);
6807 break;
6809 case BUILT_IN_SPRINTF_CHK:
6810 case BUILT_IN_VSPRINTF_CHK:
6811 maybe_emit_sprintf_chk_warning (exp, fcode);
6812 break;
6814 case BUILT_IN_FREE:
6815 if (warn_free_nonheap_object)
6816 maybe_emit_free_warning (exp);
6817 break;
6819 case BUILT_IN_THREAD_POINTER:
6820 return expand_builtin_thread_pointer (exp, target);
6822 case BUILT_IN_SET_THREAD_POINTER:
6823 expand_builtin_set_thread_pointer (exp);
6824 return const0_rtx;
6826 case BUILT_IN_CILK_DETACH:
6827 expand_builtin_cilk_detach (exp);
6828 return const0_rtx;
6830 case BUILT_IN_CILK_POP_FRAME:
6831 expand_builtin_cilk_pop_frame (exp);
6832 return const0_rtx;
6834 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6835 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6838 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6839 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6842 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6843 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6844 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6845 /* We allow user CHKP builtins if Pointer Bounds
6846 Checker is off. */
6847 if (!chkp_function_instrumented_p (current_function_decl))
6849 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6850 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6851 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6852 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6853 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6854 return expand_normal (CALL_EXPR_ARG (exp, 0));
6855 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6856 return expand_normal (size_zero_node);
6857 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6858 return expand_normal (size_int (-1));
6859 else
6860 return const0_rtx;
6862 /* FALLTHROUGH */
6864 case BUILT_IN_CHKP_BNDMK:
6865 case BUILT_IN_CHKP_BNDSTX:
6866 case BUILT_IN_CHKP_BNDCL:
6867 case BUILT_IN_CHKP_BNDCU:
6868 case BUILT_IN_CHKP_BNDLDX:
6869 case BUILT_IN_CHKP_BNDRET:
6870 case BUILT_IN_CHKP_INTERSECT:
6871 case BUILT_IN_CHKP_NARROW:
6872 case BUILT_IN_CHKP_EXTRACT_LOWER:
6873 case BUILT_IN_CHKP_EXTRACT_UPPER:
6874 /* A software implementation of Pointer Bounds Checker is not yet
6875 implemented; target support is required. */
6876 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
6877 break;
6879 case BUILT_IN_ACC_ON_DEVICE:
6880 /* Do a library call if we failed to expand the builtin when
6881 folding. */
6882 break;
6884 default: /* Just do a library call for unknown builtins. */
6885 break;
6888 /* The switch statement above can drop through to cause the function
6889 to be called normally. */
6890 return expand_call (exp, target, ignore);
6893 /* Similar to expand_builtin but is used for instrumented calls. */
6895 rtx
6896 expand_builtin_with_bounds (tree exp, rtx target,
6897 rtx subtarget ATTRIBUTE_UNUSED,
6898 machine_mode mode, int ignore)
6900 tree fndecl = get_callee_fndecl (exp);
6901 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6903 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6905 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6906 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6908 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6909 && fcode < END_CHKP_BUILTINS);
6911 switch (fcode)
6913 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6914 target = expand_builtin_memcpy_with_bounds (exp, target);
6915 if (target)
6916 return target;
6917 break;
6919 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6920 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6921 if (target)
6922 return target;
6923 break;
6925 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6926 target = expand_builtin_memset_with_bounds (exp, target, mode);
6927 if (target)
6928 return target;
6929 break;
6931 default:
6932 break;
6935 /* The switch statement above can drop through to cause the function
6936 to be called normally. */
6937 return expand_call (exp, target, ignore);
6940 /* Determine whether a tree node represents a call to a built-in
6941 function. If the tree T is a call to a built-in function with
6942 the right number of arguments of the appropriate types, return
6943 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6944 Otherwise the return value is END_BUILTINS. */
6946 enum built_in_function
6947 builtin_mathfn_code (const_tree t)
6949 const_tree fndecl, arg, parmlist;
6950 const_tree argtype, parmtype;
6951 const_call_expr_arg_iterator iter;
6953 if (TREE_CODE (t) != CALL_EXPR
6954 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6955 return END_BUILTINS;
6957 fndecl = get_callee_fndecl (t);
6958 if (fndecl == NULL_TREE
6959 || TREE_CODE (fndecl) != FUNCTION_DECL
6960 || ! DECL_BUILT_IN (fndecl)
6961 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6962 return END_BUILTINS;
6964 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6965 init_const_call_expr_arg_iterator (t, &iter);
6966 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6968 /* If a function doesn't take a variable number of arguments,
6969 the last element in the list will have type `void'. */
6970 parmtype = TREE_VALUE (parmlist);
6971 if (VOID_TYPE_P (parmtype))
6973 if (more_const_call_expr_args_p (&iter))
6974 return END_BUILTINS;
6975 return DECL_FUNCTION_CODE (fndecl);
6978 if (! more_const_call_expr_args_p (&iter))
6979 return END_BUILTINS;
6981 arg = next_const_call_expr_arg (&iter);
6982 argtype = TREE_TYPE (arg);
6984 if (SCALAR_FLOAT_TYPE_P (parmtype))
6986 if (! SCALAR_FLOAT_TYPE_P (argtype))
6987 return END_BUILTINS;
6989 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6991 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6992 return END_BUILTINS;
6994 else if (POINTER_TYPE_P (parmtype))
6996 if (! POINTER_TYPE_P (argtype))
6997 return END_BUILTINS;
6999 else if (INTEGRAL_TYPE_P (parmtype))
7001 if (! INTEGRAL_TYPE_P (argtype))
7002 return END_BUILTINS;
7004 else
7005 return END_BUILTINS;
7008 /* Variable-length argument list. */
7009 return DECL_FUNCTION_CODE (fndecl);
7012 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7013 evaluate to a constant. */
7015 static tree
7016 fold_builtin_constant_p (tree arg)
7018 /* We return 1 for a numeric type that's known to be a constant
7019 value at compile-time or for an aggregate type that's a
7020 literal constant. */
7021 STRIP_NOPS (arg);
7023 /* If we know this is a constant, emit the constant of one. */
7024 if (CONSTANT_CLASS_P (arg)
7025 || (TREE_CODE (arg) == CONSTRUCTOR
7026 && TREE_CONSTANT (arg)))
7027 return integer_one_node;
7028 if (TREE_CODE (arg) == ADDR_EXPR)
7030 tree op = TREE_OPERAND (arg, 0);
7031 if (TREE_CODE (op) == STRING_CST
7032 || (TREE_CODE (op) == ARRAY_REF
7033 && integer_zerop (TREE_OPERAND (op, 1))
7034 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7035 return integer_one_node;
7038 /* If this expression has side effects, show we don't know it to be a
7039 constant. Likewise if it's a pointer or aggregate type, since in
7040 those cases we only want literals, which are only optimized
7041 when generating RTL, not later.
7042 And finally, if we are compiling an initializer, not code, we
7043 need to return a definite result now; there's not going to be any
7044 more optimization done. */
7045 if (TREE_SIDE_EFFECTS (arg)
7046 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7047 || POINTER_TYPE_P (TREE_TYPE (arg))
7048 || cfun == 0
7049 || folding_initializer
7050 || force_folding_builtin_constant_p)
7051 return integer_zero_node;
7053 return NULL_TREE;
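/* Editor's sketch: the three folding outcomes implemented above.  */
static int
constant_p_examples (int x)
{
  int a = __builtin_constant_p (42);		/* folds to 1: literal */
  int b = __builtin_constant_p ("abc");		/* folds to 1: string address */
  int c = __builtin_constant_p (x++);		/* folds to 0: side effects */
  int d = __builtin_constant_p (x);		/* deferred: NULL_TREE here,
						   decided later in compilation */
  return a + b + c + d;
}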
7056 /* Create a call to builtin_expect with PRED, EXPECTED and (if non-NULL)
7057 PREDICTOR as its arguments and return it as a truthvalue. */
7059 static tree
7060 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7061 tree predictor)
7063 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7065 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7066 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7067 ret_type = TREE_TYPE (TREE_TYPE (fn));
7068 pred_type = TREE_VALUE (arg_types);
7069 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7071 pred = fold_convert_loc (loc, pred_type, pred);
7072 expected = fold_convert_loc (loc, expected_type, expected);
7073 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7074 predictor);
7076 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7077 build_int_cst (ret_type, 0));
7080 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7081 NULL_TREE if no simplification is possible. */
7083 tree
7084 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7086 tree inner, fndecl, inner_arg0;
7087 enum tree_code code;
7089 /* Distribute the expected value over short-circuiting operators.
7090 See through the cast from truthvalue_type_node to long. */
7091 inner_arg0 = arg0;
7092 while (CONVERT_EXPR_P (inner_arg0)
7093 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7094 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7095 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7097 /* If this is a builtin_expect within a builtin_expect keep the
7098 inner one. See through a comparison against a constant. It
7099 might have been added to create a truthvalue. */
7100 inner = inner_arg0;
7102 if (COMPARISON_CLASS_P (inner)
7103 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7104 inner = TREE_OPERAND (inner, 0);
7106 if (TREE_CODE (inner) == CALL_EXPR
7107 && (fndecl = get_callee_fndecl (inner))
7108 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7109 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7110 return arg0;
7112 inner = inner_arg0;
7113 code = TREE_CODE (inner);
7114 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7116 tree op0 = TREE_OPERAND (inner, 0);
7117 tree op1 = TREE_OPERAND (inner, 1);
7119 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7120 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7121 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7123 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7126 /* If the argument isn't invariant then there's nothing else we can do. */
7127 if (!TREE_CONSTANT (inner_arg0))
7128 return NULL_TREE;
7130 /* If we expect that a comparison against the argument will fold to
7131 a constant return the constant. In practice, this means a true
7132 constant or the address of a non-weak symbol. */
7133 inner = inner_arg0;
7134 STRIP_NOPS (inner);
7135 if (TREE_CODE (inner) == ADDR_EXPR)
7138 do
7139 inner = TREE_OPERAND (inner, 0);
7141 while (TREE_CODE (inner) == COMPONENT_REF
7142 || TREE_CODE (inner) == ARRAY_REF);
7143 if ((TREE_CODE (inner) == VAR_DECL
7144 || TREE_CODE (inner) == FUNCTION_DECL)
7145 && DECL_WEAK (inner))
7146 return NULL_TREE;
7149 /* Otherwise, ARG0 already has the proper type for the return value. */
7150 return arg0;
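/* As a sketch, the TRUTH_ANDIF/ORIF distribution above rewrites

     __builtin_expect (a && b, 1)

   into

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries the expectation (and the optional
   predictor argument, when present).  */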
7153 /* Fold a call to __builtin_classify_type with argument ARG. */
7155 static tree
7156 fold_builtin_classify_type (tree arg)
7158 if (arg == 0)
7159 return build_int_cst (integer_type_node, no_type_class);
7161 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7164 /* Fold a call to __builtin_strlen with argument ARG. */
7166 static tree
7167 fold_builtin_strlen (location_t loc, tree type, tree arg)
7169 if (!validate_arg (arg, POINTER_TYPE))
7170 return NULL_TREE;
7171 else
7173 tree len = c_strlen (arg, 0);
7175 if (len)
7176 return fold_convert_loc (loc, type, len);
7178 return NULL_TREE;
7182 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7184 static tree
7185 fold_builtin_inf (location_t loc, tree type, int warn)
7187 REAL_VALUE_TYPE real;
7189 /* __builtin_inff is intended to be usable to define INFINITY on all
7190 targets. If an infinity is not available, INFINITY expands "to a
7191 positive constant of type float that overflows at translation
7192 time", footnote "In this case, using INFINITY will violate the
7193 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7194 Thus we pedwarn to ensure this constraint violation is
7195 diagnosed. */
7196 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7197 pedwarn (loc, 0, "target format does not support infinity");
7199 real_inf (&real);
7200 return build_real (type, real);
7203 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7204 NULL_TREE if no simplification can be made. */
7206 static tree
7207 fold_builtin_sincos (location_t loc,
7208 tree arg0, tree arg1, tree arg2)
7210 tree type;
7211 tree fndecl, call = NULL_TREE;
7213 if (!validate_arg (arg0, REAL_TYPE)
7214 || !validate_arg (arg1, POINTER_TYPE)
7215 || !validate_arg (arg2, POINTER_TYPE))
7216 return NULL_TREE;
7218 type = TREE_TYPE (arg0);
7220 /* Find the cexpi builtin for this type; sincos is canonicalized to it. */
7221 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7222 if (fn == END_BUILTINS)
7223 return NULL_TREE;
7225 /* Calculate the result when the argument is a constant. */
7226 if (TREE_CODE (arg0) == REAL_CST)
7228 tree complex_type = build_complex_type (type);
7229 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7231 if (!call)
7233 if (!targetm.libc_has_function (function_c99_math_complex)
7234 || !builtin_decl_implicit_p (fn))
7235 return NULL_TREE;
7236 fndecl = builtin_decl_explicit (fn);
7237 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7238 call = builtin_save_expr (call);
7241 return build2 (COMPOUND_EXPR, void_type_node,
7242 build2 (MODIFY_EXPR, void_type_node,
7243 build_fold_indirect_ref_loc (loc, arg1),
7244 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7245 build2 (MODIFY_EXPR, void_type_node,
7246 build_fold_indirect_ref_loc (loc, arg2),
7247 fold_build1_loc (loc, REALPART_EXPR, type, call)));
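/* Roughly, for a non-constant argument this emits the equivalent of

     __complex__ double t = cexpi (x);
     *sinp = __imag__ t, *cosp = __real__ t;

   where sinp/cosp stand for ARG1/ARG2; for a REAL_CST argument the
   cexpi value is instead computed at compile time.  */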
7250 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7251 arguments to the call, and TYPE is its return type.
7252 Return NULL_TREE if no simplification can be made. */
7254 static tree
7255 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7257 if (!validate_arg (arg1, POINTER_TYPE)
7258 || !validate_arg (arg2, INTEGER_TYPE)
7259 || !validate_arg (len, INTEGER_TYPE))
7260 return NULL_TREE;
7261 else
7263 const char *p1;
7265 if (TREE_CODE (arg2) != INTEGER_CST
7266 || !tree_fits_uhwi_p (len))
7267 return NULL_TREE;
7269 p1 = c_getstr (arg1);
7270 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7272 char c;
7273 const char *r;
7274 tree tem;
7276 if (target_char_cast (arg2, &c))
7277 return NULL_TREE;
7279 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7281 if (r == NULL)
7282 return build_int_cst (TREE_TYPE (arg1), 0);
7284 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7285 return fold_convert_loc (loc, type, tem);
7287 return NULL_TREE;
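/* E.g. with constant operands the call folds entirely:

     memchr ("hello", 'l', 5)  ->  "hello" + 2
     memchr ("hello", 'z', 5)  ->  (null pointer)

   subject to LEN not reaching past the terminating NUL.  */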
7291 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7292 Return NULL_TREE if no simplification can be made. */
7294 static tree
7295 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7297 if (!validate_arg (arg1, POINTER_TYPE)
7298 || !validate_arg (arg2, POINTER_TYPE)
7299 || !validate_arg (len, INTEGER_TYPE))
7300 return NULL_TREE;
7302 /* If the LEN parameter is zero, return zero. */
7303 if (integer_zerop (len))
7304 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7305 arg1, arg2);
7307 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7308 if (operand_equal_p (arg1, arg2, 0))
7309 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7311 /* If the LEN parameter is one, return an expression corresponding to
7312 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7313 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7315 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7316 tree cst_uchar_ptr_node
7317 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7319 tree ind1
7320 = fold_convert_loc (loc, integer_type_node,
7321 build1 (INDIRECT_REF, cst_uchar_node,
7322 fold_convert_loc (loc,
7323 cst_uchar_ptr_node,
7324 arg1)));
7325 tree ind2
7326 = fold_convert_loc (loc, integer_type_node,
7327 build1 (INDIRECT_REF, cst_uchar_node,
7328 fold_convert_loc (loc,
7329 cst_uchar_ptr_node,
7330 arg2)));
7331 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7334 return NULL_TREE;
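/* Sketch of the LEN == 1 case above, at the source level:

     memcmp (p, q, 1)  ->  *(const unsigned char *) p
                           - *(const unsigned char *) q

   a conforming result, since memcmp compares as unsigned chars.  */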
7337 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7338 Return NULL_TREE if no simplification can be made. */
7340 static tree
7341 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7343 if (!validate_arg (arg1, POINTER_TYPE)
7344 || !validate_arg (arg2, POINTER_TYPE))
7345 return NULL_TREE;
7347 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7348 if (operand_equal_p (arg1, arg2, 0))
7349 return integer_zero_node;
7351 /* If the second arg is "", return *(const unsigned char*)arg1. */
7352 const char *p2 = c_getstr (arg2);
7353 if (p2 && *p2 == '\0')
7355 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7356 tree cst_uchar_ptr_node
7357 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7359 return fold_convert_loc (loc, integer_type_node,
7360 build1 (INDIRECT_REF, cst_uchar_node,
7361 fold_convert_loc (loc,
7362 cst_uchar_ptr_node,
7363 arg1)));
7366 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7367 const char *p1 = c_getstr (arg1);
7368 if (p1 && *p1 == '\0')
7370 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7371 tree cst_uchar_ptr_node
7372 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7374 tree temp
7375 = fold_convert_loc (loc, integer_type_node,
7376 build1 (INDIRECT_REF, cst_uchar_node,
7377 fold_convert_loc (loc,
7378 cst_uchar_ptr_node,
7379 arg2)));
7380 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7383 return NULL_TREE;
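/* For instance:

     strcmp (s, s)   ->  0
     strcmp (s, "")  ->  *(const unsigned char *) s
     strcmp ("", s)  ->  -*(const unsigned char *) s  */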
7386 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7387 Return NULL_TREE if no simplification can be made. */
7389 static tree
7390 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7392 if (!validate_arg (arg1, POINTER_TYPE)
7393 || !validate_arg (arg2, POINTER_TYPE)
7394 || !validate_arg (len, INTEGER_TYPE))
7395 return NULL_TREE;
7397 /* If the LEN parameter is zero, return zero. */
7398 if (integer_zerop (len))
7399 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7400 arg1, arg2);
7402 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7403 if (operand_equal_p (arg1, arg2, 0))
7404 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7406 /* If the second arg is "", and the length is greater than zero,
7407 return *(const unsigned char*)arg1. */
7408 const char *p2 = c_getstr (arg2);
7409 if (p2 && *p2 == '\0'
7410 && TREE_CODE (len) == INTEGER_CST
7411 && tree_int_cst_sgn (len) == 1)
7413 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7414 tree cst_uchar_ptr_node
7415 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7417 return fold_convert_loc (loc, integer_type_node,
7418 build1 (INDIRECT_REF, cst_uchar_node,
7419 fold_convert_loc (loc,
7420 cst_uchar_ptr_node,
7421 arg1)));
7424 /* If the first arg is "", and the length is greater than zero,
7425 return -*(const unsigned char*)arg2. */
7426 const char *p1 = c_getstr (arg1);
7427 if (p1 && *p1 == '\0'
7428 && TREE_CODE (len) == INTEGER_CST
7429 && tree_int_cst_sgn (len) == 1)
7431 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7432 tree cst_uchar_ptr_node
7433 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7435 tree temp = fold_convert_loc (loc, integer_type_node,
7436 build1 (INDIRECT_REF, cst_uchar_node,
7437 fold_convert_loc (loc,
7438 cst_uchar_ptr_node,
7439 arg2)));
7440 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7443 /* If the LEN parameter is one, return an expression corresponding to
7444 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7445 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7447 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7448 tree cst_uchar_ptr_node
7449 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7451 tree ind1 = fold_convert_loc (loc, integer_type_node,
7452 build1 (INDIRECT_REF, cst_uchar_node,
7453 fold_convert_loc (loc,
7454 cst_uchar_ptr_node,
7455 arg1)));
7456 tree ind2 = fold_convert_loc (loc, integer_type_node,
7457 build1 (INDIRECT_REF, cst_uchar_node,
7458 fold_convert_loc (loc,
7459 cst_uchar_ptr_node,
7460 arg2)));
7461 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7464 return NULL_TREE;
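/* Likewise, for example:

     strncmp (s, t, 0)   ->  0 (S and T still evaluated for side effects)
     strncmp (s, "", n)  ->  *(const unsigned char *) s, for constant n > 0
     strncmp (s, t, 1)   ->  the one-byte difference, as for memcmp.  */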
7467 /* Fold a call to builtin isascii with argument ARG. */
7469 static tree
7470 fold_builtin_isascii (location_t loc, tree arg)
7472 if (!validate_arg (arg, INTEGER_TYPE))
7473 return NULL_TREE;
7474 else
7476 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7477 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7478 build_int_cst (integer_type_node,
7479 ~ (unsigned HOST_WIDE_INT) 0x7f));
7480 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7481 arg, integer_zero_node);
7485 /* Fold a call to builtin toascii with argument ARG. */
7487 static tree
7488 fold_builtin_toascii (location_t loc, tree arg)
7490 if (!validate_arg (arg, INTEGER_TYPE))
7491 return NULL_TREE;
7493 /* Transform toascii(c) -> (c & 0x7f). */
7494 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7495 build_int_cst (integer_type_node, 0x7f));
7498 /* Fold a call to builtin isdigit with argument ARG. */
7500 static tree
7501 fold_builtin_isdigit (location_t loc, tree arg)
7503 if (!validate_arg (arg, INTEGER_TYPE))
7504 return NULL_TREE;
7505 else
7507 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7508 /* According to the C standard, isdigit is unaffected by locale.
7509 However, it definitely is affected by the target character set. */
7510 unsigned HOST_WIDE_INT target_digit0
7511 = lang_hooks.to_target_charset ('0');
7513 if (target_digit0 == 0)
7514 return NULL_TREE;
7516 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7517 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7518 build_int_cst (unsigned_type_node, target_digit0));
7519 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7520 build_int_cst (unsigned_type_node, 9));
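/* The net effect, written as C for a target where '0' is 0x30:

     isdigit (c)  ->  ((unsigned) c - 0x30 <= 9)

   one unsigned subtract-and-compare instead of the two-sided range
   test c >= '0' && c <= '9'.  */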
7524 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7526 static tree
7527 fold_builtin_fabs (location_t loc, tree arg, tree type)
7529 if (!validate_arg (arg, REAL_TYPE))
7530 return NULL_TREE;
7532 arg = fold_convert_loc (loc, type, arg);
7533 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7536 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7538 static tree
7539 fold_builtin_abs (location_t loc, tree arg, tree type)
7541 if (!validate_arg (arg, INTEGER_TYPE))
7542 return NULL_TREE;
7544 arg = fold_convert_loc (loc, type, arg);
7545 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7548 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7550 static tree
7551 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7553 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7554 if (validate_arg (arg0, REAL_TYPE)
7555 && validate_arg (arg1, REAL_TYPE)
7556 && validate_arg (arg2, REAL_TYPE)
7557 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7558 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7560 return NULL_TREE;
7563 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7565 static tree
7566 fold_builtin_carg (location_t loc, tree arg, tree type)
7568 if (validate_arg (arg, COMPLEX_TYPE)
7569 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7571 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7573 if (atan2_fn)
7575 tree new_arg = builtin_save_expr (arg);
7576 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7577 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7578 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7582 return NULL_TREE;
7585 /* Fold a call to builtin frexp; we can assume the base is 2. */
7587 static tree
7588 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7590 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7591 return NULL_TREE;
7593 STRIP_NOPS (arg0);
7595 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7596 return NULL_TREE;
7598 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7600 /* Proceed if a valid pointer type was passed in. */
7601 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7603 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7604 tree frac, exp;
7606 switch (value->cl)
7608 case rvc_zero:
7609 /* For +-0, return (*exp = 0, +-0). */
7610 exp = integer_zero_node;
7611 frac = arg0;
7612 break;
7613 case rvc_nan:
7614 case rvc_inf:
7615 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7616 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7617 case rvc_normal:
7619 /* Since the frexp function always expects base 2, and in
7620 GCC normalized significands are already in the range
7621 [0.5, 1.0), we have exactly what frexp wants. */
7622 REAL_VALUE_TYPE frac_rvt = *value;
7623 SET_REAL_EXP (&frac_rvt, 0);
7624 frac = build_real (rettype, frac_rvt);
7625 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7627 break;
7628 default:
7629 gcc_unreachable ();
7632 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7633 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7634 TREE_SIDE_EFFECTS (arg1) = 1;
7635 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7638 return NULL_TREE;
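/* Example of the constant folding above: frexp (8.0, &e) becomes the
   COMPOUND_EXPR (*e = 4, 0.5), since 8.0 == 0.5 * 2**4 and GCC keeps
   significands normalized to [0.5, 1.0).  */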
7641 /* Fold a call to builtin modf. */
7643 static tree
7644 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7646 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7647 return NULL_TREE;
7649 STRIP_NOPS (arg0);
7651 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7652 return NULL_TREE;
7654 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7656 /* Proceed if a valid pointer type was passed in. */
7657 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7659 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7660 REAL_VALUE_TYPE trunc, frac;
7662 switch (value->cl)
7664 case rvc_nan:
7665 case rvc_zero:
7666 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7667 trunc = frac = *value;
7668 break;
7669 case rvc_inf:
7670 /* For +-Inf, return (*arg1 = arg0, +-0). */
7671 frac = dconst0;
7672 frac.sign = value->sign;
7673 trunc = *value;
7674 break;
7675 case rvc_normal:
7676 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7677 real_trunc (&trunc, VOIDmode, value);
7678 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7679 /* If the original number was negative and already
7680 integral, then the fractional part is -0.0. */
7681 if (value->sign && frac.cl == rvc_zero)
7682 frac.sign = value->sign;
7683 break;
7686 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7687 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7688 build_real (rettype, trunc));
7689 TREE_SIDE_EFFECTS (arg1) = 1;
7690 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7691 build_real (rettype, frac));
7694 return NULL_TREE;
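/* Examples of the cases above:

     modf (2.5, &i)    ->  (*i = 2.0, 0.5)
     modf (-2.0, &i)   ->  (*i = -2.0, -0.0)
     modf (+inf, &i)   ->  (*i = +inf, 0.0)  */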
7697 /* Given a location LOC, an interclass builtin function decl FNDECL
7698 and its single argument ARG, return a folded expression computing
7699 the same, or NULL_TREE if we either couldn't or didn't want to fold
7700 (the latter happens if there's an RTL instruction available). */
7702 static tree
7703 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7705 machine_mode mode;
7707 if (!validate_arg (arg, REAL_TYPE))
7708 return NULL_TREE;
7710 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7711 return NULL_TREE;
7713 mode = TYPE_MODE (TREE_TYPE (arg));
7715 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7717 /* If there is no optab, try generic code. */
7718 switch (DECL_FUNCTION_CODE (fndecl))
7720 tree result;
7722 CASE_FLT_FN (BUILT_IN_ISINF):
7724 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7725 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7726 tree type = TREE_TYPE (arg);
7727 REAL_VALUE_TYPE r;
7728 char buf[128];
7730 if (is_ibm_extended)
7732 /* NaN and Inf are encoded in the high-order double value
7733 only. The low-order value is not significant. */
7734 type = double_type_node;
7735 mode = DFmode;
7736 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7738 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7739 real_from_string (&r, buf);
7740 result = build_call_expr (isgr_fn, 2,
7741 fold_build1_loc (loc, ABS_EXPR, type, arg),
7742 build_real (type, r));
7743 return result;
7745 CASE_FLT_FN (BUILT_IN_FINITE):
7746 case BUILT_IN_ISFINITE:
7748 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7749 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7750 tree type = TREE_TYPE (arg);
7751 REAL_VALUE_TYPE r;
7752 char buf[128];
7754 if (is_ibm_extended)
7756 /* NaN and Inf are encoded in the high-order double value
7757 only. The low-order value is not significant. */
7758 type = double_type_node;
7759 mode = DFmode;
7760 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7762 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7763 real_from_string (&r, buf);
7764 result = build_call_expr (isle_fn, 2,
7765 fold_build1_loc (loc, ABS_EXPR, type, arg),
7766 build_real (type, r));
7767 /*result = fold_build2_loc (loc, UNGT_EXPR,
7768 TREE_TYPE (TREE_TYPE (fndecl)),
7769 fold_build1_loc (loc, ABS_EXPR, type, arg),
7770 build_real (type, r));
7771 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7772 TREE_TYPE (TREE_TYPE (fndecl)),
7773 result);*/
7774 return result;
7776 case BUILT_IN_ISNORMAL:
7778 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7779 islessequal(fabs(x),DBL_MAX). */
7780 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7781 tree type = TREE_TYPE (arg);
7782 tree orig_arg, max_exp, min_exp;
7783 machine_mode orig_mode = mode;
7784 REAL_VALUE_TYPE rmax, rmin;
7785 char buf[128];
7787 orig_arg = arg = builtin_save_expr (arg);
7788 if (is_ibm_extended)
7790 /* Use double to test the normal range of IBM extended
7791 precision. Emin for IBM extended precision is
7792 different to emin for IEEE double, being 53 higher
7793 since the low double exponent is at least 53 lower
7794 than the high double exponent. */
7795 type = double_type_node;
7796 mode = DFmode;
7797 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7799 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7801 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7802 real_from_string (&rmax, buf);
7803 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7804 real_from_string (&rmin, buf);
7805 max_exp = build_real (type, rmax);
7806 min_exp = build_real (type, rmin);
7808 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7809 if (is_ibm_extended)
7811 /* Testing the high end of the range is done just using
7812 the high double, using the same test as isfinite().
7813 For the subnormal end of the range we first test the
7814 high double, then if its magnitude is equal to the
7815 limit of 0x1p-969, we test whether the low double is
7816 non-zero and opposite sign to the high double. */
7817 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7818 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7819 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7820 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7821 arg, min_exp);
7822 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7823 complex_double_type_node, orig_arg);
7824 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7825 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7826 tree zero = build_real (type, dconst0);
7827 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7828 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7829 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7830 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7831 fold_build3 (COND_EXPR,
7832 integer_type_node,
7833 hilt, logt, lolt));
7834 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7835 eq_min, ok_lo);
7836 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7837 gt_min, eq_min);
7839 else
7841 tree const isge_fn
7842 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7843 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7845 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7846 max_exp, min_exp);
7847 return result;
7849 default:
7850 break;
7853 return NULL_TREE;
7856 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7857 ARG is the argument for the call. */
7859 static tree
7860 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7862 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7864 if (!validate_arg (arg, REAL_TYPE))
7865 return NULL_TREE;
7867 switch (builtin_index)
7869 case BUILT_IN_ISINF:
7870 if (!HONOR_INFINITIES (arg))
7871 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7873 return NULL_TREE;
7875 case BUILT_IN_ISINF_SIGN:
7877 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7878 /* In a boolean context, GCC will fold the inner COND_EXPR to
7879 1. So e.g. "if (isinf_sign(x))" would be folded to just
7880 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7881 tree signbit_fn = mathfn_built_in_1
7882 (TREE_TYPE (arg), CFN_BUILT_IN_SIGNBIT, 0);
7883 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7884 tree tmp = NULL_TREE;
7886 arg = builtin_save_expr (arg);
7888 if (signbit_fn && isinf_fn)
7890 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7891 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7893 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7894 signbit_call, integer_zero_node);
7895 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7896 isinf_call, integer_zero_node);
7898 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7899 integer_minus_one_node, integer_one_node);
7900 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7901 isinf_call, tmp,
7902 integer_zero_node);
7905 return tmp;
7908 case BUILT_IN_ISFINITE:
7909 if (!HONOR_NANS (arg)
7910 && !HONOR_INFINITIES (arg))
7911 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7913 return NULL_TREE;
7915 case BUILT_IN_ISNAN:
7916 if (!HONOR_NANS (arg))
7917 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7920 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7921 if (is_ibm_extended)
7923 /* NaN and Inf are encoded in the high-order double value
7924 only. The low-order value is not significant. */
7925 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7928 arg = builtin_save_expr (arg);
7929 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7931 default:
7932 gcc_unreachable ();
7936 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7937 This builtin will generate code to return the appropriate floating
7938 point classification depending on the value of the floating point
7939 number passed in. The possible return values must be supplied as
7940 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7941 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7942 one floating-point argument, which is "type generic". */
7944 static tree
7945 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7947 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7948 arg, type, res, tmp;
7949 machine_mode mode;
7950 REAL_VALUE_TYPE r;
7951 char buf[128];
7953 /* Verify the required arguments in the original call. */
7954 if (nargs != 6
7955 || !validate_arg (args[0], INTEGER_TYPE)
7956 || !validate_arg (args[1], INTEGER_TYPE)
7957 || !validate_arg (args[2], INTEGER_TYPE)
7958 || !validate_arg (args[3], INTEGER_TYPE)
7959 || !validate_arg (args[4], INTEGER_TYPE)
7960 || !validate_arg (args[5], REAL_TYPE))
7961 return NULL_TREE;
7963 fp_nan = args[0];
7964 fp_infinite = args[1];
7965 fp_normal = args[2];
7966 fp_subnormal = args[3];
7967 fp_zero = args[4];
7968 arg = args[5];
7969 type = TREE_TYPE (arg);
7970 mode = TYPE_MODE (type);
7971 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7973 /* fpclassify(x) ->
7974 isnan(x) ? FP_NAN :
7975 (fabs(x) == Inf ? FP_INFINITE :
7976 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7977 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7979 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7980 build_real (type, dconst0));
7981 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7982 tmp, fp_zero, fp_subnormal);
7984 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7985 real_from_string (&r, buf);
7986 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7987 arg, build_real (type, r));
7988 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7990 if (HONOR_INFINITIES (mode))
7992 real_inf (&r);
7993 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7994 build_real (type, r));
7995 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7996 fp_infinite, res);
7999 if (HONOR_NANS (mode))
8001 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8002 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8005 return res;
8008 /* Fold a call to an unordered comparison function such as
8009 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8010 being called and ARG0 and ARG1 are the arguments for the call.
8011 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8012 the opposite of the desired result. UNORDERED_CODE is used
8013 for modes that can hold NaNs and ORDERED_CODE is used for
8014 the rest. */
8016 static tree
8017 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8018 enum tree_code unordered_code,
8019 enum tree_code ordered_code)
8021 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8022 enum tree_code code;
8023 tree type0, type1;
8024 enum tree_code code0, code1;
8025 tree cmp_type = NULL_TREE;
8027 type0 = TREE_TYPE (arg0);
8028 type1 = TREE_TYPE (arg1);
8030 code0 = TREE_CODE (type0);
8031 code1 = TREE_CODE (type1);
8033 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8034 /* Choose the wider of two real types. */
8035 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8036 ? type0 : type1;
8037 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8038 cmp_type = type0;
8039 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8040 cmp_type = type1;
8042 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8043 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8045 if (unordered_code == UNORDERED_EXPR)
8047 if (!HONOR_NANS (arg0))
8048 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8049 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8052 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8053 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8054 fold_build2_loc (loc, code, type, arg0, arg1));
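/* E.g. __builtin_isgreater (x, y) arrives here with UNLE_EXPR/LE_EXPR
   and folds to !(x UNLE y) when NaNs are honored, or to the plain
   !(x <= y) otherwise; the codes passed in are the negation of the
   desired predicate, hence the TRUTH_NOT_EXPR wrapper.  */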
8057 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8058 arithmetic if it can never overflow, or into internal functions that
8059 return both the result of the arithmetic and an overflow boolean flag in
8060 a complex integer result, or some other check for overflow.
8061 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8062 checking part of that. */
8064 static tree
8065 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8066 tree arg0, tree arg1, tree arg2)
8068 enum internal_fn ifn = IFN_LAST;
8069 /* The code of the expression corresponding to the type-generic
8070 built-in, or ERROR_MARK for the type-specific ones. */
8071 enum tree_code opcode = ERROR_MARK;
8072 bool ovf_only = false;
8074 switch (fcode)
8076 case BUILT_IN_ADD_OVERFLOW_P:
8077 ovf_only = true;
8078 /* FALLTHRU */
8079 case BUILT_IN_ADD_OVERFLOW:
8080 opcode = PLUS_EXPR;
8081 /* FALLTHRU */
8082 case BUILT_IN_SADD_OVERFLOW:
8083 case BUILT_IN_SADDL_OVERFLOW:
8084 case BUILT_IN_SADDLL_OVERFLOW:
8085 case BUILT_IN_UADD_OVERFLOW:
8086 case BUILT_IN_UADDL_OVERFLOW:
8087 case BUILT_IN_UADDLL_OVERFLOW:
8088 ifn = IFN_ADD_OVERFLOW;
8089 break;
8090 case BUILT_IN_SUB_OVERFLOW_P:
8091 ovf_only = true;
8092 /* FALLTHRU */
8093 case BUILT_IN_SUB_OVERFLOW:
8094 opcode = MINUS_EXPR;
8095 /* FALLTHRU */
8096 case BUILT_IN_SSUB_OVERFLOW:
8097 case BUILT_IN_SSUBL_OVERFLOW:
8098 case BUILT_IN_SSUBLL_OVERFLOW:
8099 case BUILT_IN_USUB_OVERFLOW:
8100 case BUILT_IN_USUBL_OVERFLOW:
8101 case BUILT_IN_USUBLL_OVERFLOW:
8102 ifn = IFN_SUB_OVERFLOW;
8103 break;
8104 case BUILT_IN_MUL_OVERFLOW_P:
8105 ovf_only = true;
8106 /* FALLTHRU */
8107 case BUILT_IN_MUL_OVERFLOW:
8108 opcode = MULT_EXPR;
8109 /* FALLTHRU */
8110 case BUILT_IN_SMUL_OVERFLOW:
8111 case BUILT_IN_SMULL_OVERFLOW:
8112 case BUILT_IN_SMULLL_OVERFLOW:
8113 case BUILT_IN_UMUL_OVERFLOW:
8114 case BUILT_IN_UMULL_OVERFLOW:
8115 case BUILT_IN_UMULLL_OVERFLOW:
8116 ifn = IFN_MUL_OVERFLOW;
8117 break;
8118 default:
8119 gcc_unreachable ();
8122 /* For the "generic" overloads, the first two arguments can have different
8123 types and the last argument determines the target type to use to check
8124 for overflow. The arguments of the other overloads all have the same
8125 type. */
8126 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8128 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8129 arguments are constant, attempt to fold the built-in call into a constant
8130 expression indicating whether or not it detected an overflow. */
8131 if (ovf_only
8132 && TREE_CODE (arg0) == INTEGER_CST
8133 && TREE_CODE (arg1) == INTEGER_CST)
8134 /* Perform the computation in the target type and check for overflow. */
8135 return omit_one_operand_loc (loc, boolean_type_node,
8136 arith_overflowed_p (opcode, type, arg0, arg1)
8137 ? boolean_true_node : boolean_false_node,
8138 arg2);
8140 tree ctype = build_complex_type (type);
8141 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8142 2, arg0, arg1);
8143 tree tgt = save_expr (call);
8144 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8145 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8146 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8148 if (ovf_only)
8149 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8151 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8152 tree store
8153 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8154 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
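/* A sketch of the tree built for __builtin_add_overflow (a, b, res):

     t = .ADD_OVERFLOW (a, b);      (internal fn, complex integer result)
     *res = REALPART_EXPR <t>,      (the wrapped result)
     (bool) IMAGPART_EXPR <t>       (the overflow flag)

   For the _overflow_p variants only the flag is kept; *RES is never
   written.  */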
8157 /* Fold a call to __builtin_FILE to a constant string. */
8159 static inline tree
8160 fold_builtin_FILE (location_t loc)
8162 if (const char *fname = LOCATION_FILE (loc))
8163 return build_string_literal (strlen (fname) + 1, fname);
8165 return build_string_literal (1, "");
8168 /* Fold a call to __builtin_FUNCTION to a constant string. */
8170 static inline tree
8171 fold_builtin_FUNCTION ()
8173 if (current_function_decl)
8175 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8176 return build_string_literal (strlen (name) + 1, name);
8179 return build_string_literal (1, "");
8182 /* Fold a call to __builtin_LINE to an integer constant. */
8184 static inline tree
8185 fold_builtin_LINE (location_t loc, tree type)
8187 return build_int_cst (type, LOCATION_LINE (loc));
8190 /* Fold a call to built-in function FNDECL with 0 arguments.
8191 This function returns NULL_TREE if no simplification was possible. */
8193 static tree
8194 fold_builtin_0 (location_t loc, tree fndecl)
8196 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8198 switch (fcode)
8200 case BUILT_IN_FILE:
8201 return fold_builtin_FILE (loc);
8203 case BUILT_IN_FUNCTION:
8204 return fold_builtin_FUNCTION ();
8206 case BUILT_IN_LINE:
8207 return fold_builtin_LINE (loc, type);
8209 CASE_FLT_FN (BUILT_IN_INF):
8210 case BUILT_IN_INFD32:
8211 case BUILT_IN_INFD64:
8212 case BUILT_IN_INFD128:
8213 return fold_builtin_inf (loc, type, true);
8215 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8216 return fold_builtin_inf (loc, type, false);
8218 case BUILT_IN_CLASSIFY_TYPE:
8219 return fold_builtin_classify_type (NULL_TREE);
8221 default:
8222 break;
8224 return NULL_TREE;
8227 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8228 This function returns NULL_TREE if no simplification was possible. */
8230 static tree
8231 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8233 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8234 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8236 if (TREE_CODE (arg0) == ERROR_MARK)
8237 return NULL_TREE;
8239 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8240 return ret;
8242 switch (fcode)
8244 case BUILT_IN_CONSTANT_P:
8246 tree val = fold_builtin_constant_p (arg0);
8248 /* Gimplification will pull the CALL_EXPR for the builtin out of
8249 an if condition. When not optimizing, we'll not CSE it back.
8250 To avoid regressions such as link errors, return false now. */
8251 if (!val && !optimize)
8252 val = integer_zero_node;
8254 return val;
8257 case BUILT_IN_CLASSIFY_TYPE:
8258 return fold_builtin_classify_type (arg0);
8260 case BUILT_IN_STRLEN:
8261 return fold_builtin_strlen (loc, type, arg0);
8263 CASE_FLT_FN (BUILT_IN_FABS):
8264 case BUILT_IN_FABSD32:
8265 case BUILT_IN_FABSD64:
8266 case BUILT_IN_FABSD128:
8267 return fold_builtin_fabs (loc, arg0, type);
8269 case BUILT_IN_ABS:
8270 case BUILT_IN_LABS:
8271 case BUILT_IN_LLABS:
8272 case BUILT_IN_IMAXABS:
8273 return fold_builtin_abs (loc, arg0, type);
8275 CASE_FLT_FN (BUILT_IN_CONJ):
8276 if (validate_arg (arg0, COMPLEX_TYPE)
8277 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8278 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8279 break;
8281 CASE_FLT_FN (BUILT_IN_CREAL):
8282 if (validate_arg (arg0, COMPLEX_TYPE)
8283 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8284 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8285 break;
8287 CASE_FLT_FN (BUILT_IN_CIMAG):
8288 if (validate_arg (arg0, COMPLEX_TYPE)
8289 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8290 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8291 break;
8293 CASE_FLT_FN (BUILT_IN_CARG):
8294 return fold_builtin_carg (loc, arg0, type);
8296 case BUILT_IN_ISASCII:
8297 return fold_builtin_isascii (loc, arg0);
8299 case BUILT_IN_TOASCII:
8300 return fold_builtin_toascii (loc, arg0);
8302 case BUILT_IN_ISDIGIT:
8303 return fold_builtin_isdigit (loc, arg0);
8305 CASE_FLT_FN (BUILT_IN_FINITE):
8306 case BUILT_IN_FINITED32:
8307 case BUILT_IN_FINITED64:
8308 case BUILT_IN_FINITED128:
8309 case BUILT_IN_ISFINITE:
8311 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8312 if (ret)
8313 return ret;
8314 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8317 CASE_FLT_FN (BUILT_IN_ISINF):
8318 case BUILT_IN_ISINFD32:
8319 case BUILT_IN_ISINFD64:
8320 case BUILT_IN_ISINFD128:
8322 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8323 if (ret)
8324 return ret;
8325 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8328 case BUILT_IN_ISNORMAL:
8329 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8331 case BUILT_IN_ISINF_SIGN:
8332 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8334 CASE_FLT_FN (BUILT_IN_ISNAN):
8335 case BUILT_IN_ISNAND32:
8336 case BUILT_IN_ISNAND64:
8337 case BUILT_IN_ISNAND128:
8338 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8340 case BUILT_IN_FREE:
8341 if (integer_zerop (arg0))
8342 return build_empty_stmt (loc);
8343 break;
8345 default:
8346 break;
8349 return NULL_TREE;
8353 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8354 This function returns NULL_TREE if no simplification was possible. */
8356 static tree
8357 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8359 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8360 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8362 if (TREE_CODE (arg0) == ERROR_MARK
8363 || TREE_CODE (arg1) == ERROR_MARK)
8364 return NULL_TREE;
8366 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8367 return ret;
8369 switch (fcode)
8371 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8372 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8373 if (validate_arg (arg0, REAL_TYPE)
8374 && validate_arg (arg1, POINTER_TYPE))
8375 return do_mpfr_lgamma_r (arg0, arg1, type);
8376 break;
8378 CASE_FLT_FN (BUILT_IN_FREXP):
8379 return fold_builtin_frexp (loc, arg0, arg1, type);
8381 CASE_FLT_FN (BUILT_IN_MODF):
8382 return fold_builtin_modf (loc, arg0, arg1, type);
8384 case BUILT_IN_STRSTR:
8385 return fold_builtin_strstr (loc, arg0, arg1, type);
8387 case BUILT_IN_STRSPN:
8388 return fold_builtin_strspn (loc, arg0, arg1);
8390 case BUILT_IN_STRCSPN:
8391 return fold_builtin_strcspn (loc, arg0, arg1);
8393 case BUILT_IN_STRCHR:
8394 case BUILT_IN_INDEX:
8395 return fold_builtin_strchr (loc, arg0, arg1, type);
8397 case BUILT_IN_STRRCHR:
8398 case BUILT_IN_RINDEX:
8399 return fold_builtin_strrchr (loc, arg0, arg1, type);
8401 case BUILT_IN_STRCMP:
8402 return fold_builtin_strcmp (loc, arg0, arg1);
8404 case BUILT_IN_STRPBRK:
8405 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8407 case BUILT_IN_EXPECT:
8408 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8410 case BUILT_IN_ISGREATER:
8411 return fold_builtin_unordered_cmp (loc, fndecl,
8412 arg0, arg1, UNLE_EXPR, LE_EXPR);
8413 case BUILT_IN_ISGREATEREQUAL:
8414 return fold_builtin_unordered_cmp (loc, fndecl,
8415 arg0, arg1, UNLT_EXPR, LT_EXPR);
8416 case BUILT_IN_ISLESS:
8417 return fold_builtin_unordered_cmp (loc, fndecl,
8418 arg0, arg1, UNGE_EXPR, GE_EXPR);
8419 case BUILT_IN_ISLESSEQUAL:
8420 return fold_builtin_unordered_cmp (loc, fndecl,
8421 arg0, arg1, UNGT_EXPR, GT_EXPR);
8422 case BUILT_IN_ISLESSGREATER:
8423 return fold_builtin_unordered_cmp (loc, fndecl,
8424 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8425 case BUILT_IN_ISUNORDERED:
8426 return fold_builtin_unordered_cmp (loc, fndecl,
8427 arg0, arg1, UNORDERED_EXPR,
8428 NOP_EXPR);
8430 /* We do the folding for va_start in the expander. */
8431 case BUILT_IN_VA_START:
8432 break;
8434 case BUILT_IN_OBJECT_SIZE:
8435 return fold_builtin_object_size (arg0, arg1);
8437 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8438 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8440 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8441 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8443 default:
8444 break;
8446 return NULL_TREE;
8449 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8450 and ARG2.
8451 This function returns NULL_TREE if no simplification was possible. */
8453 static tree
8454 fold_builtin_3 (location_t loc, tree fndecl,
8455 tree arg0, tree arg1, tree arg2)
8457 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8458 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8460 if (TREE_CODE (arg0) == ERROR_MARK
8461 || TREE_CODE (arg1) == ERROR_MARK
8462 || TREE_CODE (arg2) == ERROR_MARK)
8463 return NULL_TREE;
8465 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8466 arg0, arg1, arg2))
8467 return ret;
8469 switch (fcode)
8472 CASE_FLT_FN (BUILT_IN_SINCOS):
8473 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8475 CASE_FLT_FN (BUILT_IN_FMA):
8476 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8478 CASE_FLT_FN (BUILT_IN_REMQUO):
8479 if (validate_arg (arg0, REAL_TYPE)
8480 && validate_arg (arg1, REAL_TYPE)
8481 && validate_arg (arg2, POINTER_TYPE))
8482 return do_mpfr_remquo (arg0, arg1, arg2);
8483 break;
8485 case BUILT_IN_STRNCMP:
8486 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8488 case BUILT_IN_MEMCHR:
8489 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8491 case BUILT_IN_BCMP:
8492 case BUILT_IN_MEMCMP:
8493 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8495 case BUILT_IN_EXPECT:
8496 return fold_builtin_expect (loc, arg0, arg1, arg2);
8498 case BUILT_IN_ADD_OVERFLOW:
8499 case BUILT_IN_SUB_OVERFLOW:
8500 case BUILT_IN_MUL_OVERFLOW:
8501 case BUILT_IN_ADD_OVERFLOW_P:
8502 case BUILT_IN_SUB_OVERFLOW_P:
8503 case BUILT_IN_MUL_OVERFLOW_P:
8504 case BUILT_IN_SADD_OVERFLOW:
8505 case BUILT_IN_SADDL_OVERFLOW:
8506 case BUILT_IN_SADDLL_OVERFLOW:
8507 case BUILT_IN_SSUB_OVERFLOW:
8508 case BUILT_IN_SSUBL_OVERFLOW:
8509 case BUILT_IN_SSUBLL_OVERFLOW:
8510 case BUILT_IN_SMUL_OVERFLOW:
8511 case BUILT_IN_SMULL_OVERFLOW:
8512 case BUILT_IN_SMULLL_OVERFLOW:
8513 case BUILT_IN_UADD_OVERFLOW:
8514 case BUILT_IN_UADDL_OVERFLOW:
8515 case BUILT_IN_UADDLL_OVERFLOW:
8516 case BUILT_IN_USUB_OVERFLOW:
8517 case BUILT_IN_USUBL_OVERFLOW:
8518 case BUILT_IN_USUBLL_OVERFLOW:
8519 case BUILT_IN_UMUL_OVERFLOW:
8520 case BUILT_IN_UMULL_OVERFLOW:
8521 case BUILT_IN_UMULLL_OVERFLOW:
8522 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8524 default:
8525 break;
8527 return NULL_TREE;
8530 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8531 arguments. IGNORE is true if the result of the
8532 function call is ignored. This function returns NULL_TREE if no
8533 simplification was possible. */
8535 tree
8536 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8538 tree ret = NULL_TREE;
8540 switch (nargs)
8542 case 0:
8543 ret = fold_builtin_0 (loc, fndecl);
8544 break;
8545 case 1:
8546 ret = fold_builtin_1 (loc, fndecl, args[0]);
8547 break;
8548 case 2:
8549 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8550 break;
8551 case 3:
8552 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8553 break;
8554 default:
8555 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8556 break;
8558 if (ret)
8560 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8561 SET_EXPR_LOCATION (ret, loc);
8562 TREE_NO_WARNING (ret) = 1;
8563 return ret;
8565 return NULL_TREE;
8568 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8569 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8570 of arguments in ARGS to be omitted. OLDNARGS is the number of
8571 elements in ARGS. */
8573 static tree
8574 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8575 int skip, tree fndecl, int n, va_list newargs)
8577 int nargs = oldnargs - skip + n;
8578 tree *buffer;
8580 if (n > 0)
8582 int i, j;
8584 buffer = XALLOCAVEC (tree, nargs);
8585 for (i = 0; i < n; i++)
8586 buffer[i] = va_arg (newargs, tree);
8587 for (j = skip; j < oldnargs; j++, i++)
8588 buffer[i] = args[j];
8590 else
8591 buffer = args + skip;
8593 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8596 /* Return true if FNDECL shouldn't be folded right now.
8597 If a built-in function has an inline attribute always_inline
8598 wrapper, defer folding it until after always_inline functions have
8599 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
8600 might not be performed. */
8602 bool
8603 avoid_folding_inline_builtin (tree fndecl)
8605 return (DECL_DECLARED_INLINE_P (fndecl)
8606 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8607 && cfun
8608 && !cfun->always_inline_functions_inlined
8609 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8612 /* A wrapper function for builtin folding that prevents warnings for
8613 "statement without effect" and the like, caused by removing the
8614 call node earlier than the warning is generated. */
8616 tree
8617 fold_call_expr (location_t loc, tree exp, bool ignore)
8619 tree ret = NULL_TREE;
8620 tree fndecl = get_callee_fndecl (exp);
8621 if (fndecl
8622 && TREE_CODE (fndecl) == FUNCTION_DECL
8623 && DECL_BUILT_IN (fndecl)
8624 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8625 yet. Defer folding until we see all the arguments
8626 (after inlining). */
8627 && !CALL_EXPR_VA_ARG_PACK (exp))
8629 int nargs = call_expr_nargs (exp);
8631 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8632 instead last argument is __builtin_va_arg_pack (). Defer folding
8633 even in that case, until arguments are finalized. */
8634 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8636 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8637 if (fndecl2
8638 && TREE_CODE (fndecl2) == FUNCTION_DECL
8639 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8640 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8641 return NULL_TREE;
8644 if (avoid_folding_inline_builtin (fndecl))
8645 return NULL_TREE;
8647 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8648 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8649 CALL_EXPR_ARGP (exp), ignore);
8650 else
8652 tree *args = CALL_EXPR_ARGP (exp);
8653 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8654 if (ret)
8655 return ret;
8658 return NULL_TREE;
8661 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8662 N arguments are passed in the array ARGARRAY. Return a folded
8663 expression or NULL_TREE if no simplification was possible. */
8665 tree
8666 fold_builtin_call_array (location_t loc, tree,
8667 tree fn,
8668 int n,
8669 tree *argarray)
8671 if (TREE_CODE (fn) != ADDR_EXPR)
8672 return NULL_TREE;
8674 tree fndecl = TREE_OPERAND (fn, 0);
8675 if (TREE_CODE (fndecl) == FUNCTION_DECL
8676 && DECL_BUILT_IN (fndecl))
8678 /* If last argument is __builtin_va_arg_pack (), arguments to this
8679 function are not finalized yet. Defer folding until they are. */
8680 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8682 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8683 if (fndecl2
8684 && TREE_CODE (fndecl2) == FUNCTION_DECL
8685 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8686 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8687 return NULL_TREE;
8689 if (avoid_folding_inline_builtin (fndecl))
8690 return NULL_TREE;
8691 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8692 return targetm.fold_builtin (fndecl, n, argarray, false);
8693 else
8694 return fold_builtin_n (loc, fndecl, argarray, n, false);
8697 return NULL_TREE;
8700 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8701 along with N new arguments specified as the "..." parameters. SKIP
8702 is the number of arguments in EXP to be omitted. This function is used
8703 to do varargs-to-varargs transformations. */
8705 static tree
8706 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8708 va_list ap;
8709 tree t;
8711 va_start (ap, n);
8712 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8713 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8714 va_end (ap);
8716 return t;
8719 /* Validate a single argument ARG against a tree code CODE representing
8720 a type. */
8722 static bool
8723 validate_arg (const_tree arg, enum tree_code code)
8725 if (!arg)
8726 return false;
8727 else if (code == POINTER_TYPE)
8728 return POINTER_TYPE_P (TREE_TYPE (arg));
8729 else if (code == INTEGER_TYPE)
8730 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8731 return code == TREE_CODE (TREE_TYPE (arg));
8734 /* This function validates the types of a function call argument list
8735 against a specified list of tree_codes. If the last specifier is a 0,
8736 that represents an ellipsis; otherwise the last specifier must be a
8737 VOID_TYPE.
8739 This is the GIMPLE version of validate_arglist. Eventually we want to
8740 completely convert builtins.c to work from GIMPLEs and the tree based
8741 validate_arglist will then be removed. */
8743 bool
8744 validate_gimple_arglist (const gcall *call, ...)
8746 enum tree_code code;
8747 bool res = false;
8748 va_list ap;
8749 const_tree arg;
8750 size_t i;
8752 va_start (ap, call);
8753 i = 0;
8757 code = (enum tree_code) va_arg (ap, int);
8758 switch (code)
8760 case 0:
8761 /* This signifies an ellipsis; any further arguments are all ok. */
8762 res = true;
8763 goto end;
8764 case VOID_TYPE:
8765 /* This signifies an endlink, if no arguments remain, return
8766 true, otherwise return false. */
8767 res = (i == gimple_call_num_args (call));
8768 goto end;
8769 default:
8770 /* If no parameters remain or the parameter's code does not
8771 match the specified code, return false. Otherwise continue
8772 checking any remaining arguments. */
8773 arg = gimple_call_arg (call, i++);
8774 if (!validate_arg (arg, code))
8775 goto end;
8776 break;
8779 while (1);
8781 /* We need gotos here since we can only have one va_end in a
8782 function. */
8783 end: ;
8784 va_end (ap);
8786 return res;
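/* A typical (hypothetical) use, checking a sincos-like signature:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                                   POINTER_TYPE, VOID_TYPE))
       return false;

   where the trailing VOID_TYPE is the endlink; passing 0 instead
   would accept arbitrary trailing arguments.  */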
8789 /* Default target-specific builtin expander that does nothing. */
8791 rtx
8792 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8793 rtx target ATTRIBUTE_UNUSED,
8794 rtx subtarget ATTRIBUTE_UNUSED,
8795 machine_mode mode ATTRIBUTE_UNUSED,
8796 int ignore ATTRIBUTE_UNUSED)
8798 return NULL_RTX;
8801 /* Returns true if EXP represents data that would potentially reside
8802 in a readonly section. */
8804 bool
8805 readonly_data_expr (tree exp)
8807 STRIP_NOPS (exp);
8809 if (TREE_CODE (exp) != ADDR_EXPR)
8810 return false;
8812 exp = get_base_address (TREE_OPERAND (exp, 0));
8813 if (!exp)
8814 return false;
8816 /* Make sure we call decl_readonly_section only for trees it
8817 can handle (since it returns true for everything it doesn't
8818 understand). */
8819 if (TREE_CODE (exp) == STRING_CST
8820 || TREE_CODE (exp) == CONSTRUCTOR
8821 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8822 return decl_readonly_section (exp, 0);
8823 else
8824 return false;
8827 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8828 to the call, and TYPE is its return type.
8830 Return NULL_TREE if no simplification was possible, otherwise return the
8831 simplified form of the call as a tree.
8833 The simplified form may be a constant or other expression which
8834 computes the same value, but in a more efficient manner (including
8835 calls to other builtin functions).
8837 The call may contain arguments which need to be evaluated, but
8838 which are not useful to determine the result of the call. In
8839 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8840 COMPOUND_EXPR will be an argument which must be evaluated.
8841 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8842 COMPOUND_EXPR in the chain will contain the tree for the simplified
8843 form of the builtin function call. */
8845 static tree
8846 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8848 if (!validate_arg (s1, POINTER_TYPE)
8849 || !validate_arg (s2, POINTER_TYPE))
8850 return NULL_TREE;
8851 else
8853 tree fn;
8854 const char *p1, *p2;
8856 p2 = c_getstr (s2);
8857 if (p2 == NULL)
8858 return NULL_TREE;
8860 p1 = c_getstr (s1);
8861 if (p1 != NULL)
8863 const char *r = strstr (p1, p2);
8864 tree tem;
8866 if (r == NULL)
8867 return build_int_cst (TREE_TYPE (s1), 0);
8869 /* Return an offset into the constant string argument. */
8870 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8871 return fold_convert_loc (loc, type, tem);
8874 /* The argument is const char *, and the result is char *, so we need
8875 a type conversion here to avoid a warning. */
8876 if (p2[0] == '\0')
8877 return fold_convert_loc (loc, type, s1);
8879 if (p2[1] != '\0')
8880 return NULL_TREE;
8882 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8883 if (!fn)
8884 return NULL_TREE;
8886 /* New argument list transforming strstr(s1, s2) to
8887 strchr(s1, s2[0]). */
8888 return build_call_expr_loc (loc, fn, 2, s1,
8889 build_int_cst (integer_type_node, p2[0]));
8893 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8894 the call, and TYPE is its return type.
8896 Return NULL_TREE if no simplification was possible, otherwise return the
8897 simplified form of the call as a tree.
8899 The simplified form may be a constant or other expression which
8900 computes the same value, but in a more efficient manner (including
8901 calls to other builtin functions).
8903 The call may contain arguments which need to be evaluated, but
8904 which are not useful to determine the result of the call. In
8905 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8906 COMPOUND_EXPR will be an argument which must be evaluated.
8907 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8908 COMPOUND_EXPR in the chain will contain the tree for the simplified
8909 form of the builtin function call. */
8911 static tree
8912 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8914 if (!validate_arg (s1, POINTER_TYPE)
8915 || !validate_arg (s2, INTEGER_TYPE))
8916 return NULL_TREE;
8917 else
8919 const char *p1;
8921 if (TREE_CODE (s2) != INTEGER_CST)
8922 return NULL_TREE;
8924 p1 = c_getstr (s1);
8925 if (p1 != NULL)
8927 char c;
8928 const char *r;
8929 tree tem;
8931 if (target_char_cast (s2, &c))
8932 return NULL_TREE;
8934 r = strchr (p1, c);
8936 if (r == NULL)
8937 return build_int_cst (TREE_TYPE (s1), 0);
8939 /* Return an offset into the constant string argument. */
8940 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8941 return fold_convert_loc (loc, type, tem);
8943 return NULL_TREE;
8947 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8948 the call, and TYPE is its return type.
8950 Return NULL_TREE if no simplification was possible, otherwise return the
8951 simplified form of the call as a tree.
8953 The simplified form may be a constant or other expression which
8954 computes the same value, but in a more efficient manner (including
8955 calls to other builtin functions).
8957 The call may contain arguments which need to be evaluated, but
8958 which are not useful to determine the result of the call. In
8959 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8960 COMPOUND_EXPR will be an argument which must be evaluated.
8961 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8962 COMPOUND_EXPR in the chain will contain the tree for the simplified
8963 form of the builtin function call. */
8965 static tree
8966 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8968 if (!validate_arg (s1, POINTER_TYPE)
8969 || !validate_arg (s2, INTEGER_TYPE))
8970 return NULL_TREE;
8971 else
8973 tree fn;
8974 const char *p1;
8976 if (TREE_CODE (s2) != INTEGER_CST)
8977 return NULL_TREE;
8979 p1 = c_getstr (s1);
8980 if (p1 != NULL)
8982 char c;
8983 const char *r;
8984 tree tem;
8986 if (target_char_cast (s2, &c))
8987 return NULL_TREE;
8989 r = strrchr (p1, c);
8991 if (r == NULL)
8992 return build_int_cst (TREE_TYPE (s1), 0);
8994 /* Return an offset into the constant string argument. */
8995 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8996 return fold_convert_loc (loc, type, tem);
8999 if (! integer_zerop (s2))
9000 return NULL_TREE;
9002 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9003 if (!fn)
9004 return NULL_TREE;
9006 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
9007 return build_call_expr_loc (loc, fn, 2, s1, s2);
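/* Example (an illustrative sketch, not part of this file): searching
   for the terminating NUL,

     char *p = strrchr (s, '\0');

   becomes strchr (s, '\0'), because the first and last occurrence of
   the terminator coincide; with a constant string,
   strrchr ("abca", 'a') folds directly to &"abca"[3].  */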
9011 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9012 to the call, and TYPE is its return type.
9014 Return NULL_TREE if no simplification was possible, otherwise return the
9015 simplified form of the call as a tree.
9017 The simplified form may be a constant or other expression which
9018 computes the same value, but in a more efficient manner (including
9019 calls to other builtin functions).
9021 The call may contain arguments which need to be evaluated, but
9022 which are not useful to determine the result of the call. In
9023 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9024 COMPOUND_EXPR will be an argument which must be evaluated.
9025 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9026 COMPOUND_EXPR in the chain will contain the tree for the simplified
9027 form of the builtin function call. */
9029 static tree
9030 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9032 if (!validate_arg (s1, POINTER_TYPE)
9033 || !validate_arg (s2, POINTER_TYPE))
9034 return NULL_TREE;
9035 else
9037 tree fn;
9038 const char *p1, *p2;
9040 p2 = c_getstr (s2);
9041 if (p2 == NULL)
9042 return NULL_TREE;
9044 p1 = c_getstr (s1);
9045 if (p1 != NULL)
9047 const char *r = strpbrk (p1, p2);
9048 tree tem;
9050 if (r == NULL)
9051 return build_int_cst (TREE_TYPE (s1), 0);
9053 /* Return an offset into the constant string argument. */
9054 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9055 return fold_convert_loc (loc, type, tem);
9058 if (p2[0] == '\0')
9059 /* strpbrk(x, "") == NULL.
9060 Evaluate and ignore s1 in case it had side-effects. */
9061 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9063 if (p2[1] != '\0')
9064 return NULL_TREE; /* Really call strpbrk. */
9066 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9067 if (!fn)
9068 return NULL_TREE;
9070 /* New argument list transforming strpbrk(s1, s2) to
9071 strchr(s1, s2[0]). */
9072 return build_call_expr_loc (loc, fn, 2, s1,
9073 build_int_cst (integer_type_node, p2[0]));
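/* Example (an illustrative sketch, not part of this file):

     char *p = strpbrk (s, "");   // folds to a null pointer; s is
                                  // still evaluated for side-effects
     char *q = strpbrk (s, ",");  // folds to strchr (s, ',')

   A constant pair such as strpbrk ("abc", "cx") folds to &"abc"[2].  */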
9077 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9078 to the call.
9080 Return NULL_TREE if no simplification was possible, otherwise return the
9081 simplified form of the call as a tree.
9083 The simplified form may be a constant or other expression which
9084 computes the same value, but in a more efficient manner (including
9085 calls to other builtin functions).
9087 The call may contain arguments which need to be evaluated, but
9088 which are not useful to determine the result of the call. In
9089 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9090 COMPOUND_EXPR will be an argument which must be evaluated.
9091 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9092 COMPOUND_EXPR in the chain will contain the tree for the simplified
9093 form of the builtin function call. */
9095 static tree
9096 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9098 if (!validate_arg (s1, POINTER_TYPE)
9099 || !validate_arg (s2, POINTER_TYPE))
9100 return NULL_TREE;
9101 else
9103 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9105 /* If either argument is "", the result is zero. */
9106 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9107 /* Evaluate and ignore both arguments in case either one has
9108 side-effects. */
9109 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9110 s1, s2);
9111 return NULL_TREE;
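/* Example (an illustrative sketch; side_effect () is a hypothetical
   function used only for illustration):

     size_t n = strspn (side_effect (), "");

   folds to (size_t) 0 while still evaluating both arguments, so the
   side_effect () call is not dropped.  */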
9115 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9116 to the call.
9118 Return NULL_TREE if no simplification was possible, otherwise return the
9119 simplified form of the call as a tree.
9121 The simplified form may be a constant or other expression which
9122 computes the same value, but in a more efficient manner (including
9123 calls to other builtin functions).
9125 The call may contain arguments which need to be evaluated, but
9126 which are not useful to determine the result of the call. In
9127 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9128 COMPOUND_EXPR will be an argument which must be evaluated.
9129 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9130 COMPOUND_EXPR in the chain will contain the tree for the simplified
9131 form of the builtin function call. */
9133 static tree
9134 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9136 if (!validate_arg (s1, POINTER_TYPE)
9137 || !validate_arg (s2, POINTER_TYPE))
9138 return NULL_TREE;
9139 else
9141 /* If the first argument is "", the result is zero. */
9142 const char *p1 = c_getstr (s1);
9143 if (p1 && *p1 == '\0')
9145 /* Evaluate and ignore argument s2 in case it has
9146 side-effects. */
9147 return omit_one_operand_loc (loc, size_type_node,
9148 size_zero_node, s2);
9151 /* If the second argument is "", return __builtin_strlen(s1). */
9152 const char *p2 = c_getstr (s2);
9153 if (p2 && *p2 == '\0')
9155 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9157 /* If the replacement _DECL isn't initialized, don't do the
9158 transformation. */
9159 if (!fn)
9160 return NULL_TREE;
9162 return build_call_expr_loc (loc, fn, 1, s1);
9164 return NULL_TREE;
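/* Example (an illustrative sketch, not part of this file):

     size_t a = strcspn ("", s2);   // folds to 0; s2 still evaluated
     size_t b = strcspn (s1, "");   // folds to __builtin_strlen (s1)

   The first rewrite keeps the evaluation of s2 in case it has
   side-effects.  */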
9168 /* Fold the next_arg or va_start call EXP. Returns true if an error
9169 was produced, false otherwise. This is done so that we don't output
9170 the error or warning twice or three times. */
9172 bool
9173 fold_builtin_next_arg (tree exp, bool va_start_p)
9175 tree fntype = TREE_TYPE (current_function_decl);
9176 int nargs = call_expr_nargs (exp);
9177 tree arg;
9178 /* There is a good chance the current input_location points inside the
9179 definition of the va_start macro (perhaps on the token for the
9180 builtin) in a system header, so warnings will not be emitted.
9181 Use the location in real source code. */
9182 source_location current_location =
9183 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9184 NULL);
9186 if (!stdarg_p (fntype))
9188 error ("%<va_start%> used in function with fixed args");
9189 return true;
9192 if (va_start_p)
9194 if (nargs != 2)
9196 error ("wrong number of arguments to function %<va_start%>");
9197 return true;
9199 arg = CALL_EXPR_ARG (exp, 1);
9201 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9202 once we have checked the arguments and, if needed, issued a warning. */
9203 else
9205 if (nargs == 0)
9207 /* Evidently an out of date version of <stdarg.h>; can't validate
9208 va_start's second argument, but can still work as intended. */
9209 warning_at (current_location,
9210 OPT_Wvarargs,
9211 "%<__builtin_next_arg%> called without an argument");
9212 return true;
9214 else if (nargs > 1)
9216 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9217 return true;
9219 arg = CALL_EXPR_ARG (exp, 0);
9222 if (TREE_CODE (arg) == SSA_NAME)
9223 arg = SSA_NAME_VAR (arg);
9225 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9226 or __builtin_next_arg (0) the first time we see it, after checking
9227 the arguments and if needed issuing a warning. */
9228 if (!integer_zerop (arg))
9230 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9232 /* Strip off all nops for the sake of the comparison. This
9233 is not quite the same as STRIP_NOPS. It does more.
9234 We must also strip off INDIRECT_EXPR for C++ reference
9235 parameters. */
9236 while (CONVERT_EXPR_P (arg)
9237 || TREE_CODE (arg) == INDIRECT_REF)
9238 arg = TREE_OPERAND (arg, 0);
9239 if (arg != last_parm)
9241 /* FIXME: Sometimes the tree optimizers give us something other
9242 than the last argument even though the user used the last
9243 argument. We just warn and continue as if it were the last
9244 argument, which means wrong code can be generated because of
9245 it. */
9246 warning_at (current_location,
9247 OPT_Wvarargs,
9248 "second parameter of %<va_start%> not last named argument");
9251 /* Undefined by C99 7.15.1.4p4 (va_start):
9252 "If the parameter parmN is declared with the register storage
9253 class, with a function or array type, or with a type that is
9254 not compatible with the type that results after application of
9255 the default argument promotions, the behavior is undefined."
9257 else if (DECL_REGISTER (arg))
9259 warning_at (current_location,
9260 OPT_Wvarargs,
9261 "undefined behavior when second parameter of "
9262 "%<va_start%> is declared with %<register%> storage");
9265 /* We want to verify the second parameter just once before the tree
9266 optimizers are run and then avoid keeping it in the tree,
9267 as otherwise we could warn even for correct code like:
9268 void foo (int i, ...)
9269 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9270 if (va_start_p)
9271 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9272 else
9273 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9275 return false;
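/* Example (an illustrative sketch, not part of this file):

     #include <stdarg.h>
     void f (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, b);   // OK: b is the last named parameter
       va_end (ap);
     }

   Writing va_start (ap, a) instead draws the -Wvarargs warning that
   the second parameter is not the last named argument; after the
   check, the argument is replaced by 0 so later passes do not warn
   again.  */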
9279 /* Expand a call EXP to __builtin_object_size. */
9281 static rtx
9282 expand_builtin_object_size (tree exp)
9284 tree ost;
9285 int object_size_type;
9286 tree fndecl = get_callee_fndecl (exp);
9288 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9290 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9291 exp, fndecl);
9292 expand_builtin_trap ();
9293 return const0_rtx;
9296 ost = CALL_EXPR_ARG (exp, 1);
9297 STRIP_NOPS (ost);
9299 if (TREE_CODE (ost) != INTEGER_CST
9300 || tree_int_cst_sgn (ost) < 0
9301 || compare_tree_int (ost, 3) > 0)
9303 error ("%Klast argument of %D is not integer constant between 0 and 3",
9304 exp, fndecl);
9305 expand_builtin_trap ();
9306 return const0_rtx;
9309 object_size_type = tree_to_shwi (ost);
9311 return object_size_type < 2 ? constm1_rtx : const0_rtx;
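/* Example (an illustrative sketch, not part of this file): if the
   size is still unknown when the builtin has to be expanded, the
   maximum modes answer "unlimited" and the minimum modes "nothing":

     size_t hi = __builtin_object_size (p, 0);   // (size_t) -1
     size_t lo = __builtin_object_size (p, 2);   // (size_t) 0

   Modes 1 and 3 behave like 0 and 2, respectively, in this fallback.  */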
9314 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9315 FCODE is the BUILT_IN_* to use.
9316 Return NULL_RTX if we failed; the caller should emit a normal call,
9317 otherwise try to get the result in TARGET, if convenient (and in
9318 mode MODE if that's convenient). */
9320 static rtx
9321 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9322 enum built_in_function fcode)
9324 tree dest, src, len, size;
9326 if (!validate_arglist (exp,
9327 POINTER_TYPE,
9328 fcode == BUILT_IN_MEMSET_CHK
9329 ? INTEGER_TYPE : POINTER_TYPE,
9330 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9331 return NULL_RTX;
9333 dest = CALL_EXPR_ARG (exp, 0);
9334 src = CALL_EXPR_ARG (exp, 1);
9335 len = CALL_EXPR_ARG (exp, 2);
9336 size = CALL_EXPR_ARG (exp, 3);
9338 if (! tree_fits_uhwi_p (size))
9339 return NULL_RTX;
9341 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9343 tree fn;
9345 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9347 warning_at (tree_nonartificial_location (exp),
9348 0, "%Kcall to %D will always overflow destination buffer",
9349 exp, get_callee_fndecl (exp));
9350 return NULL_RTX;
9353 fn = NULL_TREE;
9354 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9355 mem{cpy,pcpy,move,set} is available. */
9356 switch (fcode)
9358 case BUILT_IN_MEMCPY_CHK:
9359 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9360 break;
9361 case BUILT_IN_MEMPCPY_CHK:
9362 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9363 break;
9364 case BUILT_IN_MEMMOVE_CHK:
9365 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9366 break;
9367 case BUILT_IN_MEMSET_CHK:
9368 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9369 break;
9370 default:
9371 break;
9374 if (! fn)
9375 return NULL_RTX;
9377 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9378 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9379 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9380 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9382 else if (fcode == BUILT_IN_MEMSET_CHK)
9383 return NULL_RTX;
9384 else
9386 unsigned int dest_align = get_pointer_alignment (dest);
9388 /* If DEST is not a pointer type, call the normal function. */
9389 if (dest_align == 0)
9390 return NULL_RTX;
9392 /* If SRC and DEST are the same (and not volatile), do nothing. */
9393 if (operand_equal_p (src, dest, 0))
9395 tree expr;
9397 if (fcode != BUILT_IN_MEMPCPY_CHK)
9399 /* Evaluate and ignore LEN in case it has side-effects. */
9400 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9401 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9404 expr = fold_build_pointer_plus (dest, len);
9405 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9408 /* __memmove_chk special case. */
9409 if (fcode == BUILT_IN_MEMMOVE_CHK)
9411 unsigned int src_align = get_pointer_alignment (src);
9413 if (src_align == 0)
9414 return NULL_RTX;
9416 /* If src is categorized for a readonly section, we can use
9417 the normal __memcpy_chk. */
9418 if (readonly_data_expr (src))
9420 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9421 if (!fn)
9422 return NULL_RTX;
9423 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9424 dest, src, len, size);
9425 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9426 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9427 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9430 return NULL_RTX;
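/* Example (an illustrative sketch, not part of this file): with a
   compile-time length that the known object size can accommodate,

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0));

   expands as a plain memcpy (d, s, 16); if instead the length
   provably exceeds the size, the warning above is issued and the
   checked library call is emitted unchanged.  */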
9434 /* Emit warning if a buffer overflow is detected at compile time. */
9436 static void
9437 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9439 int is_strlen = 0;
9440 tree len, size;
9441 location_t loc = tree_nonartificial_location (exp);
9443 switch (fcode)
9445 case BUILT_IN_STRCPY_CHK:
9446 case BUILT_IN_STPCPY_CHK:
9447 /* For __strcat_chk the warning will be emitted only if overflowing
9448 by at least strlen (dest) + 1 bytes. */
9449 case BUILT_IN_STRCAT_CHK:
9450 len = CALL_EXPR_ARG (exp, 1);
9451 size = CALL_EXPR_ARG (exp, 2);
9452 is_strlen = 1;
9453 break;
9454 case BUILT_IN_STRNCAT_CHK:
9455 case BUILT_IN_STRNCPY_CHK:
9456 case BUILT_IN_STPNCPY_CHK:
9457 len = CALL_EXPR_ARG (exp, 2);
9458 size = CALL_EXPR_ARG (exp, 3);
9459 break;
9460 case BUILT_IN_SNPRINTF_CHK:
9461 case BUILT_IN_VSNPRINTF_CHK:
9462 len = CALL_EXPR_ARG (exp, 1);
9463 size = CALL_EXPR_ARG (exp, 3);
9464 break;
9465 default:
9466 gcc_unreachable ();
9469 if (!len || !size)
9470 return;
9472 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9473 return;
9475 if (is_strlen)
9477 len = c_strlen (len, 1);
9478 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9479 return;
9481 else if (fcode == BUILT_IN_STRNCAT_CHK)
9483 tree src = CALL_EXPR_ARG (exp, 1);
9484 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9485 return;
9486 src = c_strlen (src, 1);
9487 if (! src || ! tree_fits_uhwi_p (src))
9489 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9490 exp, get_callee_fndecl (exp));
9491 return;
9493 else if (tree_int_cst_lt (src, size))
9494 return;
9496 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9497 return;
9499 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9500 exp, get_callee_fndecl (exp));
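/* Example (an illustrative sketch, not part of this file):

     char buf[4];
     __builtin___strcpy_chk (buf, "too long", sizeof buf);

   The constant source length (8) is not less than the object size
   (4), so the "will always overflow destination buffer" warning
   fires at compile time.  */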
9503 /* Emit warning if a buffer overflow is detected at compile time
9504 in __sprintf_chk/__vsprintf_chk calls. */
9506 static void
9507 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9509 tree size, len, fmt;
9510 const char *fmt_str;
9511 int nargs = call_expr_nargs (exp);
9513 /* Verify the required arguments in the original call. */
9515 if (nargs < 4)
9516 return;
9517 size = CALL_EXPR_ARG (exp, 2);
9518 fmt = CALL_EXPR_ARG (exp, 3);
9520 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9521 return;
9523 /* Check whether the format is a literal string constant. */
9524 fmt_str = c_getstr (fmt);
9525 if (fmt_str == NULL)
9526 return;
9528 if (!init_target_chars ())
9529 return;
9531 /* If the format doesn't contain % args or %%, we know its size. */
9532 if (strchr (fmt_str, target_percent) == 0)
9533 len = build_int_cstu (size_type_node, strlen (fmt_str));
9534 /* If the format is "%s" and first ... argument is a string literal,
9535 we know it too. */
9536 else if (fcode == BUILT_IN_SPRINTF_CHK
9537 && strcmp (fmt_str, target_percent_s) == 0)
9539 tree arg;
9541 if (nargs < 5)
9542 return;
9543 arg = CALL_EXPR_ARG (exp, 4);
9544 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9545 return;
9547 len = c_strlen (arg, 1);
9548 if (!len || ! tree_fits_uhwi_p (len))
9549 return;
9551 else
9552 return;
9554 if (! tree_int_cst_lt (len, size))
9555 warning_at (tree_nonartificial_location (exp),
9556 0, "%Kcall to %D will always overflow destination buffer",
9557 exp, get_callee_fndecl (exp));
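/* Example (an illustrative sketch, not part of this file):

     char buf[4];
     __builtin___sprintf_chk (buf, 0, sizeof buf, "%s", "hello");

   The format and its string argument are both literals, so the
   output length (5) is known to reach the object size (4) and the
   overflow warning is emitted without running the program.  */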
9562 /* Emit a warning if free is called with the address of a variable. */
9562 static void
9563 maybe_emit_free_warning (tree exp)
9565 tree arg = CALL_EXPR_ARG (exp, 0);
9567 STRIP_NOPS (arg);
9568 if (TREE_CODE (arg) != ADDR_EXPR)
9569 return;
9571 arg = get_base_address (TREE_OPERAND (arg, 0));
9572 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9573 return;
9575 if (SSA_VAR_P (arg))
9576 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9577 "%Kattempt to free a non-heap object %qD", exp, arg);
9578 else
9579 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9580 "%Kattempt to free a non-heap object", exp);
9583 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9584 if possible. */
9586 static tree
9587 fold_builtin_object_size (tree ptr, tree ost)
9589 unsigned HOST_WIDE_INT bytes;
9590 int object_size_type;
9592 if (!validate_arg (ptr, POINTER_TYPE)
9593 || !validate_arg (ost, INTEGER_TYPE))
9594 return NULL_TREE;
9596 STRIP_NOPS (ost);
9598 if (TREE_CODE (ost) != INTEGER_CST
9599 || tree_int_cst_sgn (ost) < 0
9600 || compare_tree_int (ost, 3) > 0)
9601 return NULL_TREE;
9603 object_size_type = tree_to_shwi (ost);
9605 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9606 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9607 and (size_t) 0 for types 2 and 3. */
9608 if (TREE_SIDE_EFFECTS (ptr))
9609 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9611 if (TREE_CODE (ptr) == ADDR_EXPR)
9613 bytes = compute_builtin_object_size (ptr, object_size_type);
9614 if (wi::fits_to_tree_p (bytes, size_type_node))
9615 return build_int_cstu (size_type_node, bytes);
9617 else if (TREE_CODE (ptr) == SSA_NAME)
9619 /* If the object size is not known yet, delay folding until
9620 later. Maybe subsequent passes will help determine
9621 it. */
9622 bytes = compute_builtin_object_size (ptr, object_size_type);
9623 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9624 && wi::fits_to_tree_p (bytes, size_type_node))
9625 return build_int_cstu (size_type_node, bytes);
9628 return NULL_TREE;
9631 /* Builtins with folding operations that operate on "..." arguments
9632 need special handling; we need to store the arguments in a convenient
9633 data structure before attempting any folding. Fortunately there are
9634 only a few builtins that fall into this category. FNDECL is the
9635 function, EXP is the CALL_EXPR for the call. */
9637 static tree
9638 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9640 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9641 tree ret = NULL_TREE;
9643 switch (fcode)
9645 case BUILT_IN_FPCLASSIFY:
9646 ret = fold_builtin_fpclassify (loc, args, nargs);
9647 break;
9649 default:
9650 break;
9652 if (ret)
9654 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9655 SET_EXPR_LOCATION (ret, loc);
9656 TREE_NO_WARNING (ret) = 1;
9657 return ret;
9659 return NULL_TREE;
9662 /* Initialize format string characters in the target charset. */
9664 bool
9665 init_target_chars (void)
9667 static bool init;
9668 if (!init)
9670 target_newline = lang_hooks.to_target_charset ('\n');
9671 target_percent = lang_hooks.to_target_charset ('%');
9672 target_c = lang_hooks.to_target_charset ('c');
9673 target_s = lang_hooks.to_target_charset ('s');
9674 if (target_newline == 0 || target_percent == 0 || target_c == 0
9675 || target_s == 0)
9676 return false;
9678 target_percent_c[0] = target_percent;
9679 target_percent_c[1] = target_c;
9680 target_percent_c[2] = '\0';
9682 target_percent_s[0] = target_percent;
9683 target_percent_s[1] = target_s;
9684 target_percent_s[2] = '\0';
9686 target_percent_s_newline[0] = target_percent;
9687 target_percent_s_newline[1] = target_s;
9688 target_percent_s_newline[2] = target_newline;
9689 target_percent_s_newline[3] = '\0';
9691 init = true;
9693 return true;
9696 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9697 and no overflow/underflow occurred. INEXACT is true if M was not
9698 exactly calculated. TYPE is the tree type for the result. This
9699 function assumes that the caller cleared the MPFR flags and then
9700 calculated M, so that any flag set in between can be detected
9701 here. Return NULL_TREE if any checks fail. */
9703 static tree
9704 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9706 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9707 overflow/underflow occurred. If -frounding-math, proceed iff the
9708 result of calling FUNC was exact. */
9709 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9710 && (!flag_rounding_math || !inexact))
9712 REAL_VALUE_TYPE rr;
9714 real_from_mpfr (&rr, m, type, GMP_RNDN);
9715 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9716 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9717 but the mpfr_t is not, then we underflowed in the
9718 conversion. */
9719 if (real_isfinite (&rr)
9720 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9722 REAL_VALUE_TYPE rmode;
9724 real_convert (&rmode, TYPE_MODE (type), &rr);
9725 /* Proceed iff the specified mode can hold the value. */
9726 if (real_identical (&rmode, &rr))
9727 return build_real (type, rmode);
9730 return NULL_TREE;
9733 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9734 number and no overflow/underflow occurred. INEXACT is true if M
9735 was not exactly calculated. TYPE is the tree type for the result.
9736 This function assumes that the caller cleared the MPFR flags and
9737 then calculated M, so that any flag set in between can be detected
9738 here. Return NULL_TREE if any checks fail; if FORCE_CONVERT is
9739 true, bypass the checks. */
9741 static tree
9742 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9744 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9745 overflow/underflow occurred. If -frounding-math, proceed iff the
9746 result of calling FUNC was exact. */
9747 if (force_convert
9748 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9749 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9750 && (!flag_rounding_math || !inexact)))
9752 REAL_VALUE_TYPE re, im;
9754 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9755 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9756 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9757 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9758 but the mpfr_t is not, then we underflowed in the
9759 conversion. */
9760 if (force_convert
9761 || (real_isfinite (&re) && real_isfinite (&im)
9762 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9763 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9765 REAL_VALUE_TYPE re_mode, im_mode;
9767 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9768 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9769 /* Proceed iff the specified mode can hold the value. */
9770 if (force_convert
9771 || (real_identical (&re_mode, &re)
9772 && real_identical (&im_mode, &im)))
9773 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9774 build_real (TREE_TYPE (type), im_mode));
9777 return NULL_TREE;
9780 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9781 the pointer *(ARG_QUO) and return the result. The type is taken
9782 from the type of ARG0 and is used for setting the precision of the
9783 calculation and results. */
9785 static tree
9786 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9788 tree const type = TREE_TYPE (arg0);
9789 tree result = NULL_TREE;
9791 STRIP_NOPS (arg0);
9792 STRIP_NOPS (arg1);
9794 /* To proceed, MPFR must exactly represent the target floating point
9795 format, which only happens when the target base equals two. */
9796 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9797 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9798 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9800 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9801 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9803 if (real_isfinite (ra0) && real_isfinite (ra1))
9805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9806 const int prec = fmt->p;
9807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9808 tree result_rem;
9809 long integer_quo;
9810 mpfr_t m0, m1;
9812 mpfr_inits2 (prec, m0, m1, NULL);
9813 mpfr_from_real (m0, ra0, GMP_RNDN);
9814 mpfr_from_real (m1, ra1, GMP_RNDN);
9815 mpfr_clear_flags ();
9816 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9817 /* Remquo is independent of the rounding mode, so pass
9818 inexact=0 to do_mpfr_ckconv(). */
9819 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9820 mpfr_clears (m0, m1, NULL);
9821 if (result_rem)
9823 /* MPFR calculates quo in the host's long, so it may
9824 return more bits in quo than the target int can hold
9825 if sizeof(host long) > sizeof(target int). This can
9826 happen even for native compilers in LP64 mode. In
9827 these cases, reduce the quo value modulo the largest
9828 number that the target int can hold while leaving one
9829 bit for the sign. */
9830 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9831 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9833 /* Dereference the quo pointer argument. */
9834 arg_quo = build_fold_indirect_ref (arg_quo);
9835 /* Proceed iff a valid pointer type was passed in. */
9836 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9838 /* Set the value. */
9839 tree result_quo
9840 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9841 build_int_cst (TREE_TYPE (arg_quo),
9842 integer_quo));
9843 TREE_SIDE_EFFECTS (result_quo) = 1;
9844 /* Combine the quo assignment with the rem. */
9845 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9846 result_quo, result_rem));
9851 return result;
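/* Example (an illustrative sketch, not part of this file): with
   constant arguments such as

     int q;
     double r = remquo (7.0, 3.0, &q);

   the call is folded with MPFR at the precision of double into the
   equivalent of (q = 2, r = 1.0), so no libm call remains at run
   time.  */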
9854 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9855 resulting value as a tree with type TYPE. The mpfr precision is
9856 set to the precision of TYPE. We assume that this mpfr function
9857 returns zero if the result could be calculated exactly within the
9858 requested precision. In addition, the integer pointer represented
9859 by ARG_SG will be dereferenced and set to the appropriate signgam
9860 (-1,1) value. */
9862 static tree
9863 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9865 tree result = NULL_TREE;
9867 STRIP_NOPS (arg);
9869 /* To proceed, MPFR must exactly represent the target floating point
9870 format, which only happens when the target base equals two. Also
9871 verify ARG is a constant and that ARG_SG is an int pointer. */
9872 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9873 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9874 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9875 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9877 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9879 /* In addition to NaN and Inf, the argument cannot be zero or a
9880 negative integer. */
9881 if (real_isfinite (ra)
9882 && ra->cl != rvc_zero
9883 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9885 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9886 const int prec = fmt->p;
9887 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9888 int inexact, sg;
9889 mpfr_t m;
9890 tree result_lg;
9892 mpfr_init2 (m, prec);
9893 mpfr_from_real (m, ra, GMP_RNDN);
9894 mpfr_clear_flags ();
9895 inexact = mpfr_lgamma (m, &sg, m, rnd);
9896 result_lg = do_mpfr_ckconv (m, type, inexact);
9897 mpfr_clear (m);
9898 if (result_lg)
9900 tree result_sg;
9902 /* Dereference the arg_sg pointer argument. */
9903 arg_sg = build_fold_indirect_ref (arg_sg);
9904 /* Assign the signgam value into *arg_sg. */
9905 result_sg = fold_build2 (MODIFY_EXPR,
9906 TREE_TYPE (arg_sg), arg_sg,
9907 build_int_cst (TREE_TYPE (arg_sg), sg));
9908 TREE_SIDE_EFFECTS (result_sg) = 1;
9909 /* Combine the signgam assignment with the lgamma result. */
9910 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9911 result_sg, result_lg));
9916 return result;
9919 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9920 mpc function FUNC on it and return the resulting value as a tree
9921 with type TYPE. The mpfr precision is set to the precision of
9922 TYPE. We assume that function FUNC returns zero if the result
9923 could be calculated exactly within the requested precision. If
9924 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9925 in the arguments and/or results. */
9927 tree
9928 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9929 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9931 tree result = NULL_TREE;
9933 STRIP_NOPS (arg0);
9934 STRIP_NOPS (arg1);
9936 /* To proceed, MPFR must exactly represent the target floating point
9937 format, which only happens when the target base equals two. */
9938 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9939 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9940 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9942 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9944 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9945 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9946 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9947 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9949 if (do_nonfinite
9950 || (real_isfinite (re0) && real_isfinite (im0)
9951 && real_isfinite (re1) && real_isfinite (im1)))
9953 const struct real_format *const fmt =
9954 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9955 const int prec = fmt->p;
9956 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9957 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9958 int inexact;
9959 mpc_t m0, m1;
9961 mpc_init2 (m0, prec);
9962 mpc_init2 (m1, prec);
9963 mpfr_from_real (mpc_realref (m0), re0, rnd);
9964 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9965 mpfr_from_real (mpc_realref (m1), re1, rnd);
9966 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9967 mpfr_clear_flags ();
9968 inexact = func (m0, m0, m1, crnd);
9969 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9970 mpc_clear (m0);
9971 mpc_clear (m1);
9975 return result;
9978 /* A wrapper function for builtin folding that prevents warnings for
9979 "statement without effect" and the like, caused by removing the
9980 call node earlier than the warning is generated. */
9982 tree
9983 fold_call_stmt (gcall *stmt, bool ignore)
9985 tree ret = NULL_TREE;
9986 tree fndecl = gimple_call_fndecl (stmt);
9987 location_t loc = gimple_location (stmt);
9988 if (fndecl
9989 && TREE_CODE (fndecl) == FUNCTION_DECL
9990 && DECL_BUILT_IN (fndecl)
9991 && !gimple_call_va_arg_pack_p (stmt))
9993 int nargs = gimple_call_num_args (stmt);
9994 tree *args = (nargs > 0
9995 ? gimple_call_arg_ptr (stmt, 0)
9996 : &error_mark_node);
9998 if (avoid_folding_inline_builtin (fndecl))
9999 return NULL_TREE;
10000 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10002 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10004 else
10006 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10007 if (ret)
10009 /* Propagate location information from original call to
10010 expansion of builtin. Otherwise things like
10011 maybe_emit_chk_warning, that operate on the expansion
10012 of a builtin, will use the wrong location information. */
10013 if (gimple_has_location (stmt))
10015 tree realret = ret;
10016 if (TREE_CODE (ret) == NOP_EXPR)
10017 realret = TREE_OPERAND (ret, 0);
10018 if (CAN_HAVE_LOCATION_P (realret)
10019 && !EXPR_HAS_LOCATION (realret))
10020 SET_EXPR_LOCATION (realret, loc);
10021 return realret;
10023 return ret;
10027 return NULL_TREE;
10030 /* Look up the function in builtin_decl that corresponds to DECL
10031 and set ASMSPEC as its user assembler name. DECL must be a
10032 function decl that declares a builtin. */
10034 void
10035 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10037 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10038 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10039 && asmspec != 0);
10041 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10042 set_user_assembler_name (builtin, asmspec);
10044 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10045 && INT_TYPE_SIZE < BITS_PER_WORD)
10047 set_user_assembler_libfunc ("ffs", asmspec);
10048 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10049 "ffs");
10053 /* Return true if DECL is a builtin that expands to a constant or similarly
10054 simple code. */
10055 bool
10056 is_simple_builtin (tree decl)
10058 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10059 switch (DECL_FUNCTION_CODE (decl))
10061 /* Builtins that expand to constants. */
10062 case BUILT_IN_CONSTANT_P:
10063 case BUILT_IN_EXPECT:
10064 case BUILT_IN_OBJECT_SIZE:
10065 case BUILT_IN_UNREACHABLE:
10066 /* Simple register moves or loads from stack. */
10067 case BUILT_IN_ASSUME_ALIGNED:
10068 case BUILT_IN_RETURN_ADDRESS:
10069 case BUILT_IN_EXTRACT_RETURN_ADDR:
10070 case BUILT_IN_FROB_RETURN_ADDR:
10071 case BUILT_IN_RETURN:
10072 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10073 case BUILT_IN_FRAME_ADDRESS:
10074 case BUILT_IN_VA_END:
10075 case BUILT_IN_STACK_SAVE:
10076 case BUILT_IN_STACK_RESTORE:
10077 /* Exception state returns or moves registers around. */
10078 case BUILT_IN_EH_FILTER:
10079 case BUILT_IN_EH_POINTER:
10080 case BUILT_IN_EH_COPY_VALUES:
10081 return true;
10083 default:
10084 return false;
10087 return false;
10090 /* Return true if DECL is a builtin that is not expensive, i.e., one
10091 that is most probably expanded inline into reasonably simple code.
10092 This is a superset of is_simple_builtin. */
10093 bool
10094 is_inexpensive_builtin (tree decl)
10096 if (!decl)
10097 return false;
10098 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10099 return true;
10100 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10101 switch (DECL_FUNCTION_CODE (decl))
10103 case BUILT_IN_ABS:
10104 case BUILT_IN_ALLOCA:
10105 case BUILT_IN_ALLOCA_WITH_ALIGN:
10106 case BUILT_IN_BSWAP16:
10107 case BUILT_IN_BSWAP32:
10108 case BUILT_IN_BSWAP64:
10109 case BUILT_IN_CLZ:
10110 case BUILT_IN_CLZIMAX:
10111 case BUILT_IN_CLZL:
10112 case BUILT_IN_CLZLL:
10113 case BUILT_IN_CTZ:
10114 case BUILT_IN_CTZIMAX:
10115 case BUILT_IN_CTZL:
10116 case BUILT_IN_CTZLL:
10117 case BUILT_IN_FFS:
10118 case BUILT_IN_FFSIMAX:
10119 case BUILT_IN_FFSL:
10120 case BUILT_IN_FFSLL:
10121 case BUILT_IN_IMAXABS:
10122 case BUILT_IN_FINITE:
10123 case BUILT_IN_FINITEF:
10124 case BUILT_IN_FINITEL:
10125 case BUILT_IN_FINITED32:
10126 case BUILT_IN_FINITED64:
10127 case BUILT_IN_FINITED128:
10128 case BUILT_IN_FPCLASSIFY:
10129 case BUILT_IN_ISFINITE:
10130 case BUILT_IN_ISINF_SIGN:
10131 case BUILT_IN_ISINF:
10132 case BUILT_IN_ISINFF:
10133 case BUILT_IN_ISINFL:
10134 case BUILT_IN_ISINFD32:
10135 case BUILT_IN_ISINFD64:
10136 case BUILT_IN_ISINFD128:
10137 case BUILT_IN_ISNAN:
10138 case BUILT_IN_ISNANF:
10139 case BUILT_IN_ISNANL:
10140 case BUILT_IN_ISNAND32:
10141 case BUILT_IN_ISNAND64:
10142 case BUILT_IN_ISNAND128:
10143 case BUILT_IN_ISNORMAL:
10144 case BUILT_IN_ISGREATER:
10145 case BUILT_IN_ISGREATEREQUAL:
10146 case BUILT_IN_ISLESS:
10147 case BUILT_IN_ISLESSEQUAL:
10148 case BUILT_IN_ISLESSGREATER:
10149 case BUILT_IN_ISUNORDERED:
10150 case BUILT_IN_VA_ARG_PACK:
10151 case BUILT_IN_VA_ARG_PACK_LEN:
10152 case BUILT_IN_VA_COPY:
10153 case BUILT_IN_TRAP:
10154 case BUILT_IN_SAVEREGS:
10155 case BUILT_IN_POPCOUNTL:
10156 case BUILT_IN_POPCOUNTLL:
10157 case BUILT_IN_POPCOUNTIMAX:
10158 case BUILT_IN_POPCOUNT:
10159 case BUILT_IN_PARITYL:
10160 case BUILT_IN_PARITYLL:
10161 case BUILT_IN_PARITYIMAX:
10162 case BUILT_IN_PARITY:
10163 case BUILT_IN_LABS:
10164 case BUILT_IN_LLABS:
10165 case BUILT_IN_PREFETCH:
10166 case BUILT_IN_ACC_ON_DEVICE:
10167 return true;
10169 default:
10170 return is_simple_builtin (decl);
10173 return false;