/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];
/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   (with Cilk Plus enabled) names one of the Cilk runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
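
/* For instance (illustrative examples, not an exhaustive list):
   "__builtin_memcpy" and "__atomic_load_n" satisfy the predicate above,
   while a plain "memcpy" does not.  */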
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
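
/* Illustrative example (not from the original sources): if the routines
   above determine that an address satisfies addr % 16 == 4, they report
   align == 128 bits with bitpos == 32 bits, and get_object_alignment
   collapses that to bitpos & -bitpos == 32 bits, i.e. the largest
   power-of-two alignment the address is actually guaranteed to have.  */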
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an
	     approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If EXP is not a POINTER_TYPE, the conservative
   BITS_PER_UNIT is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
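
/* For example (illustrative only): given the constant "hello" with a known
   offset of 1, the code above returns ssize_int (strlen ("ello")) == 4;
   given "foo\0bar" with an offset that is not a compile-time constant, it
   returns NULL_TREE, because the internal zero byte makes the length
   unknowable at compile time.  */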
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
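
/* Illustrative example (not from the original sources): on a typical
   little-endian target, c_readstr ("abcd", SImode) produces the constant
   0x64636261, while a fully big-endian target yields 0x61626364; the
   computation of J above is what performs this byte placement.  */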
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
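
/* To summarize the buffer layout created above (a reading aid, not new
   behavior): word 0 receives the frame value, word 1 the address of
   RECEIVER_LABEL, and the area starting at offset 2 * GET_MODE_SIZE (Pmode)
   holds the nonlocal stack save data.  expand_builtin_longjmp below reads
   the buffer back using the same offsets.  */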
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
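
/* Illustrative uses, mirroring calls elsewhere in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by arbitrary further arguments.  */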
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
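
/* Illustrative usage in user code (not part of this file):

     __builtin_prefetch (&a[i + 8], 0, 3);

   expands to a read prefetch with maximal temporal locality on targets
   providing a prefetch pattern, and to no code at all (when the address
   has no side effects) otherwise.  */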
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is a SAVE_EXPR that has not been resolved, MEM_ATTRS can still
     be derived from its operand; for expr->a.b only <variable>.a.b is
     recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
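
/* Worked example of the rounding above (illustrative): with size == 9 and a
   register mode requiring 4-byte alignment, CEIL (9, 4) * 4 == 12, so the
   register's slot starts at offset 12 and size then advances by the mode's
   size.  The same rounding scheme is used throughout the untyped call
   machinery below.  */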
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
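
/* Illustrative usage of the untyped call machinery (a sketch of the
   documented GCC extension, not code from this file; target_fn is a
   hypothetical function being forwarded to, and the byte count 64 is a
   caller-supplied assumption about the size of the stacked arguments):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (result);
*/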
1664 /* Perform an untyped return. */
1666 static void
1667 expand_builtin_return (rtx result)
1669 int size, align, regno;
1670 machine_mode mode;
1671 rtx reg;
1672 rtx_insn *call_fusage = 0;
1674 result = convert_memory_address (Pmode, result);
1676 apply_result_size ();
1677 result = gen_rtx_MEM (BLKmode, result);
1679 if (targetm.have_untyped_return ())
1681 rtx vector = result_vector (0, result);
1682 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1683 emit_barrier ();
1684 return;
1687 /* Restore the return value and note that each value is used. */
1688 size = 0;
1689 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1690 if ((mode = apply_result_mode[regno]) != VOIDmode)
1692 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1693 if (size % align != 0)
1694 size = CEIL (size, align) * align;
1695 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1696 emit_move_insn (reg, adjust_address (result, mode, size));
1698 push_to_sequence (call_fusage);
1699 emit_use (reg);
1700 call_fusage = get_insns ();
1701 end_sequence ();
1702 size += GET_MODE_SIZE (mode);
1705 /* Put the USE insns before the return. */
1706 emit_insn (call_fusage);
1708 /* Return whatever values were restored by jumping directly to the end
1709 of the function. */
1710 expand_naked_return ();
1713 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1715 static enum type_class
1716 type_to_class (tree type)
1718 switch (TREE_CODE (type))
1720 case VOID_TYPE: return void_type_class;
1721 case INTEGER_TYPE: return integer_type_class;
1722 case ENUMERAL_TYPE: return enumeral_type_class;
1723 case BOOLEAN_TYPE: return boolean_type_class;
1724 case POINTER_TYPE: return pointer_type_class;
1725 case REFERENCE_TYPE: return reference_type_class;
1726 case OFFSET_TYPE: return offset_type_class;
1727 case REAL_TYPE: return real_type_class;
1728 case COMPLEX_TYPE: return complex_type_class;
1729 case FUNCTION_TYPE: return function_type_class;
1730 case METHOD_TYPE: return method_type_class;
1731 case RECORD_TYPE: return record_type_class;
1732 case UNION_TYPE:
1733 case QUAL_UNION_TYPE: return union_type_class;
1734 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1735 ? string_type_class : array_type_class);
1736 case LANG_TYPE: return lang_type_class;
1737 default: return no_type_class;
1741 /* Expand a call EXP to __builtin_classify_type. */
1743 static rtx
1744 expand_builtin_classify_type (tree exp)
1746 if (call_expr_nargs (exp))
1747 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1748 return GEN_INT (no_type_class);
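/* For example, __builtin_classify_type (3.14) expands to the constant
   real_type_class via the mapping above, a char * argument yields
   pointer_type_class, and a call with no argument yields
   no_type_class. */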
1751 /* This helper macro, meant to be used in mathfn_built_in below,
1752 determines which among a set of three builtin math functions is
1753 appropriate for a given type mode. The `F' and `L' cases are
1754 automatically generated from the `double' case. */
1755 #define CASE_MATHFN(MATHFN) \
1756 CASE_CFN_##MATHFN: \
1757 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1758 fcodel = BUILT_IN_##MATHFN##L ; break;
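/* For illustration, CASE_MATHFN (SQRT) expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   covering the double, float and long double variants at once. */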
1759 /* Similar to above, but appends _R after any F/L suffix. */
1760 #define CASE_MATHFN_REENT(MATHFN) \
1761 case CFN_BUILT_IN_##MATHFN##_R: \
1762 case CFN_BUILT_IN_##MATHFN##F_R: \
1763 case CFN_BUILT_IN_##MATHFN##L_R: \
1764 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1765 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1767 /* Return a function equivalent to FN but operating on floating-point
1768 values of type TYPE, or END_BUILTINS if no such function exists.
1769 This is purely an operation on function codes; it does not guarantee
1770 that the target actually has an implementation of the function. */
1772 static built_in_function
1773 mathfn_built_in_2 (tree type, combined_fn fn)
1775 built_in_function fcode, fcodef, fcodel;
1777 switch (fn)
1779 CASE_MATHFN (ACOS)
1780 CASE_MATHFN (ACOSH)
1781 CASE_MATHFN (ASIN)
1782 CASE_MATHFN (ASINH)
1783 CASE_MATHFN (ATAN)
1784 CASE_MATHFN (ATAN2)
1785 CASE_MATHFN (ATANH)
1786 CASE_MATHFN (CBRT)
1787 CASE_MATHFN (CEIL)
1788 CASE_MATHFN (CEXPI)
1789 CASE_MATHFN (COPYSIGN)
1790 CASE_MATHFN (COS)
1791 CASE_MATHFN (COSH)
1792 CASE_MATHFN (DREM)
1793 CASE_MATHFN (ERF)
1794 CASE_MATHFN (ERFC)
1795 CASE_MATHFN (EXP)
1796 CASE_MATHFN (EXP10)
1797 CASE_MATHFN (EXP2)
1798 CASE_MATHFN (EXPM1)
1799 CASE_MATHFN (FABS)
1800 CASE_MATHFN (FDIM)
1801 CASE_MATHFN (FLOOR)
1802 CASE_MATHFN (FMA)
1803 CASE_MATHFN (FMAX)
1804 CASE_MATHFN (FMIN)
1805 CASE_MATHFN (FMOD)
1806 CASE_MATHFN (FREXP)
1807 CASE_MATHFN (GAMMA)
1808 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1809 CASE_MATHFN (HUGE_VAL)
1810 CASE_MATHFN (HYPOT)
1811 CASE_MATHFN (ILOGB)
1812 CASE_MATHFN (ICEIL)
1813 CASE_MATHFN (IFLOOR)
1814 CASE_MATHFN (INF)
1815 CASE_MATHFN (IRINT)
1816 CASE_MATHFN (IROUND)
1817 CASE_MATHFN (ISINF)
1818 CASE_MATHFN (J0)
1819 CASE_MATHFN (J1)
1820 CASE_MATHFN (JN)
1821 CASE_MATHFN (LCEIL)
1822 CASE_MATHFN (LDEXP)
1823 CASE_MATHFN (LFLOOR)
1824 CASE_MATHFN (LGAMMA)
1825 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1826 CASE_MATHFN (LLCEIL)
1827 CASE_MATHFN (LLFLOOR)
1828 CASE_MATHFN (LLRINT)
1829 CASE_MATHFN (LLROUND)
1830 CASE_MATHFN (LOG)
1831 CASE_MATHFN (LOG10)
1832 CASE_MATHFN (LOG1P)
1833 CASE_MATHFN (LOG2)
1834 CASE_MATHFN (LOGB)
1835 CASE_MATHFN (LRINT)
1836 CASE_MATHFN (LROUND)
1837 CASE_MATHFN (MODF)
1838 CASE_MATHFN (NAN)
1839 CASE_MATHFN (NANS)
1840 CASE_MATHFN (NEARBYINT)
1841 CASE_MATHFN (NEXTAFTER)
1842 CASE_MATHFN (NEXTTOWARD)
1843 CASE_MATHFN (POW)
1844 CASE_MATHFN (POWI)
1845 CASE_MATHFN (POW10)
1846 CASE_MATHFN (REMAINDER)
1847 CASE_MATHFN (REMQUO)
1848 CASE_MATHFN (RINT)
1849 CASE_MATHFN (ROUND)
1850 CASE_MATHFN (SCALB)
1851 CASE_MATHFN (SCALBLN)
1852 CASE_MATHFN (SCALBN)
1853 CASE_MATHFN (SIGNBIT)
1854 CASE_MATHFN (SIGNIFICAND)
1855 CASE_MATHFN (SIN)
1856 CASE_MATHFN (SINCOS)
1857 CASE_MATHFN (SINH)
1858 CASE_MATHFN (SQRT)
1859 CASE_MATHFN (TAN)
1860 CASE_MATHFN (TANH)
1861 CASE_MATHFN (TGAMMA)
1862 CASE_MATHFN (TRUNC)
1863 CASE_MATHFN (Y0)
1864 CASE_MATHFN (Y1)
1865 CASE_MATHFN (YN)
1867 default:
1868 return END_BUILTINS;
1871 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1872 return fcode;
1873 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1874 return fcodef;
1875 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1876 return fcodel;
1877 else
1878 return END_BUILTINS;
1881 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1882 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1883 otherwise use the explicit declaration. If we can't do the conversion,
1884 return null. */
1886 static tree
1887 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1889 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1890 if (fcode2 == END_BUILTINS)
1891 return NULL_TREE;
1893 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1894 return NULL_TREE;
1896 return builtin_decl_explicit (fcode2);
1899 /* Like mathfn_built_in_1, but always use the implicit array. */
1901 tree
1902 mathfn_built_in (tree type, combined_fn fn)
1904 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
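/* Usage sketch: mathfn_built_in (float_type_node, CFN_BUILT_IN_SIN)
   selects BUILT_IN_SINF and returns its declaration, or NULL_TREE if
   sinf is not available as an implicit builtin on the target. */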
1907 /* Like mathfn_built_in_1, but take a built_in_function and
1908 always use the implicit array. */
1910 tree
1911 mathfn_built_in (tree type, enum built_in_function fn)
1913 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1916 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1917 return its code, otherwise return IFN_LAST. Note that this function
1918 only tests whether the function is defined in internals.def, not whether
1919 it is actually available on the target. */
1921 internal_fn
1922 associated_internal_fn (tree fndecl)
1924 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1925 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1926 switch (DECL_FUNCTION_CODE (fndecl))
1928 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1929 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1930 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1931 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1932 #include "internal-fn.def"
1934 CASE_FLT_FN (BUILT_IN_POW10):
1935 return IFN_EXP10;
1937 CASE_FLT_FN (BUILT_IN_DREM):
1938 return IFN_REMAINDER;
1940 CASE_FLT_FN (BUILT_IN_SCALBN):
1941 CASE_FLT_FN (BUILT_IN_SCALBLN):
1942 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1943 return IFN_LDEXP;
1944 return IFN_LAST;
1946 default:
1947 return IFN_LAST;
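/* For example, this maps BUILT_IN_SQRTF to IFN_SQRT (via
   internal-fn.def), BUILT_IN_POW10 to IFN_EXP10, BUILT_IN_DREM to
   IFN_REMAINDER, and BUILT_IN_SCALBN to IFN_LDEXP only when the
   type's radix is 2. */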
1951 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1952 on the current target by a call to an internal function, return the
1953 code of that internal function, otherwise return IFN_LAST. The caller
1954 is responsible for ensuring that any side-effects of the built-in
1955 call are dealt with correctly. E.g. if CALL sets errno, the caller
1956 must decide that the errno result isn't needed or make it available
1957 in some other way. */
1959 internal_fn
1960 replacement_internal_fn (gcall *call)
1962 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1964 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1965 if (ifn != IFN_LAST)
1967 tree_pair types = direct_internal_fn_types (ifn, call);
1968 if (direct_internal_fn_supported_p (ifn, types))
1969 return ifn;
1972 return IFN_LAST;
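/* Sketch: a GIMPLE call sqrtf (x) whose errno result is known to be
   unneeded can be replaced by IFN_SQRT here, provided
   direct_internal_fn_supported_p reports a direct sqrt instruction
   for the type. */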
1975 /* If errno must be maintained, expand the RTL to check if the result,
1976 TARGET, of a built-in function call, EXP, is NaN, and if so set
1977 errno to EDOM. */
1979 static void
1980 expand_errno_check (tree exp, rtx target)
1982 rtx_code_label *lab = gen_label_rtx ();
1984 /* Test the result; if it is NaN, set errno=EDOM because
1985 the argument was not in the domain. */
1986 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1987 NULL_RTX, NULL, lab,
1988 /* The jump is very likely. */
1989 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1991 #ifdef TARGET_EDOM
1992 /* If this built-in doesn't throw an exception, set errno directly. */
1993 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1995 #ifdef GEN_ERRNO_RTX
1996 rtx errno_rtx = GEN_ERRNO_RTX;
1997 #else
1998 rtx errno_rtx
1999 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2000 #endif
2001 emit_move_insn (errno_rtx,
2002 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2003 emit_label (lab);
2004 return;
2006 #endif
2008 /* Make sure the library call isn't expanded as a tail call. */
2009 CALL_EXPR_TAILCALL (exp) = 0;
2011 /* We can't set errno=EDOM directly; let the library call do it.
2012 Pop the arguments right away in case the call gets deleted. */
2013 NO_DEFER_POP;
2014 expand_call (exp, target, 0);
2015 OK_DEFER_POP;
2016 emit_label (lab);
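/* A sketch of the emitted check on a TARGET_EDOM system; the
   self-comparison below is false only when the result is a NaN:

       if (result == result)
         goto done;
       errno = EDOM;
     done:;

   Without TARGET_EDOM, the NaN path instead falls back to calling the
   library function so that the library itself sets errno. */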
2019 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2020 Return NULL_RTX if a normal call should be emitted rather than expanding
2021 the function in-line. EXP is the expression that is a call to the builtin
2022 function; if convenient, the result should be placed in TARGET.
2023 SUBTARGET may be used as the target for computing one of EXP's operands. */
2025 static rtx
2026 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2028 optab builtin_optab;
2029 rtx op0;
2030 rtx_insn *insns;
2031 tree fndecl = get_callee_fndecl (exp);
2032 machine_mode mode;
2033 bool errno_set = false;
2034 bool try_widening = false;
2035 tree arg;
2037 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2038 return NULL_RTX;
2040 arg = CALL_EXPR_ARG (exp, 0);
2042 switch (DECL_FUNCTION_CODE (fndecl))
2044 CASE_FLT_FN (BUILT_IN_SQRT):
2045 errno_set = ! tree_expr_nonnegative_p (arg);
2046 try_widening = true;
2047 builtin_optab = sqrt_optab;
2048 break;
2049 CASE_FLT_FN (BUILT_IN_EXP):
2050 errno_set = true; builtin_optab = exp_optab; break;
2051 CASE_FLT_FN (BUILT_IN_EXP10):
2052 CASE_FLT_FN (BUILT_IN_POW10):
2053 errno_set = true; builtin_optab = exp10_optab; break;
2054 CASE_FLT_FN (BUILT_IN_EXP2):
2055 errno_set = true; builtin_optab = exp2_optab; break;
2056 CASE_FLT_FN (BUILT_IN_EXPM1):
2057 errno_set = true; builtin_optab = expm1_optab; break;
2058 CASE_FLT_FN (BUILT_IN_LOGB):
2059 errno_set = true; builtin_optab = logb_optab; break;
2060 CASE_FLT_FN (BUILT_IN_LOG):
2061 errno_set = true; builtin_optab = log_optab; break;
2062 CASE_FLT_FN (BUILT_IN_LOG10):
2063 errno_set = true; builtin_optab = log10_optab; break;
2064 CASE_FLT_FN (BUILT_IN_LOG2):
2065 errno_set = true; builtin_optab = log2_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOG1P):
2067 errno_set = true; builtin_optab = log1p_optab; break;
2068 CASE_FLT_FN (BUILT_IN_ASIN):
2069 builtin_optab = asin_optab; break;
2070 CASE_FLT_FN (BUILT_IN_ACOS):
2071 builtin_optab = acos_optab; break;
2072 CASE_FLT_FN (BUILT_IN_TAN):
2073 builtin_optab = tan_optab; break;
2074 CASE_FLT_FN (BUILT_IN_ATAN):
2075 builtin_optab = atan_optab; break;
2076 CASE_FLT_FN (BUILT_IN_FLOOR):
2077 builtin_optab = floor_optab; break;
2078 CASE_FLT_FN (BUILT_IN_CEIL):
2079 builtin_optab = ceil_optab; break;
2080 CASE_FLT_FN (BUILT_IN_TRUNC):
2081 builtin_optab = btrunc_optab; break;
2082 CASE_FLT_FN (BUILT_IN_ROUND):
2083 builtin_optab = round_optab; break;
2084 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2085 builtin_optab = nearbyint_optab;
2086 if (flag_trapping_math)
2087 break;
2088 /* Else fall through and expand as rint. */
2089 CASE_FLT_FN (BUILT_IN_RINT):
2090 builtin_optab = rint_optab; break;
2091 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2092 builtin_optab = significand_optab; break;
2093 default:
2094 gcc_unreachable ();
2097 /* Make a suitable register to place result in. */
2098 mode = TYPE_MODE (TREE_TYPE (exp));
2100 if (! flag_errno_math || ! HONOR_NANS (mode))
2101 errno_set = false;
2103 /* Before working hard, check whether the instruction is available, but try
2104 to widen the mode for specific operations. */
2105 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2106 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2107 && (!errno_set || !optimize_insn_for_size_p ()))
2109 rtx result = gen_reg_rtx (mode);
2111 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2112 need to expand the argument again. This way, we will not perform
2113 side-effects more than once. */
2114 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2116 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2118 start_sequence ();
2120 /* Compute into RESULT.
2121 Set RESULT to wherever the result comes back. */
2122 result = expand_unop (mode, builtin_optab, op0, result, 0);
2124 if (result != 0)
2126 if (errno_set)
2127 expand_errno_check (exp, result);
2129 /* Output the entire sequence. */
2130 insns = get_insns ();
2131 end_sequence ();
2132 emit_insn (insns);
2133 return result;
2136 /* If we were unable to expand via the builtin, stop the sequence
2137 (without outputting the insns) and call the library function
2138 with the stabilized argument list. */
2139 end_sequence ();
2142 return expand_call (exp, target, target == const0_rtx);
2145 /* Expand a call to the builtin binary math functions (pow and atan2).
2146 Return NULL_RTX if a normal call should be emitted rather than expanding the
2147 function in-line. EXP is the expression that is a call to the builtin
2148 function; if convenient, the result should be placed in TARGET.
2149 SUBTARGET may be used as the target for computing one of EXP's
2150 operands. */
2152 static rtx
2153 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2155 optab builtin_optab;
2156 rtx op0, op1, result;
2157 rtx_insn *insns;
2158 int op1_type = REAL_TYPE;
2159 tree fndecl = get_callee_fndecl (exp);
2160 tree arg0, arg1;
2161 machine_mode mode;
2162 bool errno_set = true;
2164 switch (DECL_FUNCTION_CODE (fndecl))
2166 CASE_FLT_FN (BUILT_IN_SCALBN):
2167 CASE_FLT_FN (BUILT_IN_SCALBLN):
2168 CASE_FLT_FN (BUILT_IN_LDEXP):
2169 op1_type = INTEGER_TYPE;
2170 default:
2171 break;
2174 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2175 return NULL_RTX;
2177 arg0 = CALL_EXPR_ARG (exp, 0);
2178 arg1 = CALL_EXPR_ARG (exp, 1);
2180 switch (DECL_FUNCTION_CODE (fndecl))
2182 CASE_FLT_FN (BUILT_IN_POW):
2183 builtin_optab = pow_optab; break;
2184 CASE_FLT_FN (BUILT_IN_ATAN2):
2185 builtin_optab = atan2_optab; break;
2186 CASE_FLT_FN (BUILT_IN_SCALB):
2187 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2188 return 0;
2189 builtin_optab = scalb_optab; break;
2190 CASE_FLT_FN (BUILT_IN_SCALBN):
2191 CASE_FLT_FN (BUILT_IN_SCALBLN):
2192 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2193 return 0;
2194 /* Fall through... */
2195 CASE_FLT_FN (BUILT_IN_LDEXP):
2196 builtin_optab = ldexp_optab; break;
2197 CASE_FLT_FN (BUILT_IN_FMOD):
2198 builtin_optab = fmod_optab; break;
2199 CASE_FLT_FN (BUILT_IN_REMAINDER):
2200 CASE_FLT_FN (BUILT_IN_DREM):
2201 builtin_optab = remainder_optab; break;
2202 default:
2203 gcc_unreachable ();
2206 /* Make a suitable register to place result in. */
2207 mode = TYPE_MODE (TREE_TYPE (exp));
2209 /* Before working hard, check whether the instruction is available. */
2210 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2211 return NULL_RTX;
2213 result = gen_reg_rtx (mode);
2215 if (! flag_errno_math || ! HONOR_NANS (mode))
2216 errno_set = false;
2218 if (errno_set && optimize_insn_for_size_p ())
2219 return 0;
2221 /* Always stabilize the argument list. */
2222 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2223 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2225 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2226 op1 = expand_normal (arg1);
2228 start_sequence ();
2230 /* Compute into RESULT.
2231 Set RESULT to wherever the result comes back. */
2232 result = expand_binop (mode, builtin_optab, op0, op1,
2233 result, 0, OPTAB_DIRECT);
2235 /* If we were unable to expand via the builtin, stop the sequence
2236 (without outputting the insns) and call the library function
2237 with the stabilized argument list. */
2238 if (result == 0)
2240 end_sequence ();
2241 return expand_call (exp, target, target == const0_rtx);
2244 if (errno_set)
2245 expand_errno_check (exp, result);
2247 /* Output the entire sequence. */
2248 insns = get_insns ();
2249 end_sequence ();
2250 emit_insn (insns);
2252 return result;
2255 /* Expand a call to the builtin ternary math functions (fma).
2256 Return NULL_RTX if a normal call should be emitted rather than expanding the
2257 function in-line. EXP is the expression that is a call to the builtin
2258 function; if convenient, the result should be placed in TARGET.
2259 SUBTARGET may be used as the target for computing one of EXP's
2260 operands. */
2262 static rtx
2263 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2265 optab builtin_optab;
2266 rtx op0, op1, op2, result;
2267 rtx_insn *insns;
2268 tree fndecl = get_callee_fndecl (exp);
2269 tree arg0, arg1, arg2;
2270 machine_mode mode;
2272 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2273 return NULL_RTX;
2275 arg0 = CALL_EXPR_ARG (exp, 0);
2276 arg1 = CALL_EXPR_ARG (exp, 1);
2277 arg2 = CALL_EXPR_ARG (exp, 2);
2279 switch (DECL_FUNCTION_CODE (fndecl))
2281 CASE_FLT_FN (BUILT_IN_FMA):
2282 builtin_optab = fma_optab; break;
2283 default:
2284 gcc_unreachable ();
2287 /* Make a suitable register to place result in. */
2288 mode = TYPE_MODE (TREE_TYPE (exp));
2290 /* Before working hard, check whether the instruction is available. */
2291 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2292 return NULL_RTX;
2294 result = gen_reg_rtx (mode);
2296 /* Always stabilize the argument list. */
2297 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2298 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2299 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2301 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2302 op1 = expand_normal (arg1);
2303 op2 = expand_normal (arg2);
2305 start_sequence ();
2307 /* Compute into RESULT.
2308 Set RESULT to wherever the result comes back. */
2309 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2310 result, 0);
2312 /* If we were unable to expand via the builtin, stop the sequence
2313 (without outputting the insns) and call the library function
2314 with the stabilized argument list. */
2315 if (result == 0)
2317 end_sequence ();
2318 return expand_call (exp, target, target == const0_rtx);
2321 /* Output the entire sequence. */
2322 insns = get_insns ();
2323 end_sequence ();
2324 emit_insn (insns);
2326 return result;
2329 /* Expand a call to the builtin sin and cos math functions.
2330 Return NULL_RTX if a normal call should be emitted rather than expanding the
2331 function in-line. EXP is the expression that is a call to the builtin
2332 function; if convenient, the result should be placed in TARGET.
2333 SUBTARGET may be used as the target for computing one of EXP's
2334 operands. */
2336 static rtx
2337 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2339 optab builtin_optab;
2340 rtx op0;
2341 rtx_insn *insns;
2342 tree fndecl = get_callee_fndecl (exp);
2343 machine_mode mode;
2344 tree arg;
2346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2347 return NULL_RTX;
2349 arg = CALL_EXPR_ARG (exp, 0);
2351 switch (DECL_FUNCTION_CODE (fndecl))
2353 CASE_FLT_FN (BUILT_IN_SIN):
2354 CASE_FLT_FN (BUILT_IN_COS):
2355 builtin_optab = sincos_optab; break;
2356 default:
2357 gcc_unreachable ();
2360 /* Make a suitable register to place result in. */
2361 mode = TYPE_MODE (TREE_TYPE (exp));
2363 /* Check if the sincos insn is available; otherwise fall back
2364 to the sin or cos insn. */
2365 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2366 switch (DECL_FUNCTION_CODE (fndecl))
2368 CASE_FLT_FN (BUILT_IN_SIN):
2369 builtin_optab = sin_optab; break;
2370 CASE_FLT_FN (BUILT_IN_COS):
2371 builtin_optab = cos_optab; break;
2372 default:
2373 gcc_unreachable ();
2376 /* Before working hard, check whether the instruction is available. */
2377 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2379 rtx result = gen_reg_rtx (mode);
2381 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2382 need to expand the argument again. This way, we will not perform
2383 side-effects more than once. */
2384 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2386 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2388 start_sequence ();
2390 /* Compute into RESULT.
2391 Set RESULT to wherever the result comes back. */
2392 if (builtin_optab == sincos_optab)
2394 int ok;
2396 switch (DECL_FUNCTION_CODE (fndecl))
2398 CASE_FLT_FN (BUILT_IN_SIN):
2399 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2400 break;
2401 CASE_FLT_FN (BUILT_IN_COS):
2402 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2403 break;
2404 default:
2405 gcc_unreachable ();
2407 gcc_assert (ok);
2409 else
2410 result = expand_unop (mode, builtin_optab, op0, result, 0);
2412 if (result != 0)
2414 /* Output the entire sequence. */
2415 insns = get_insns ();
2416 end_sequence ();
2417 emit_insn (insns);
2418 return result;
2421 /* If we were unable to expand via the builtin, stop the sequence
2422 (without outputting the insns) and call the library function
2423 with the stabilized argument list. */
2424 end_sequence ();
2427 return expand_call (exp, target, target == const0_rtx);
2430 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2431 return an RTL instruction code that implements the functionality.
2432 If that isn't possible or available, return CODE_FOR_nothing. */
2434 static enum insn_code
2435 interclass_mathfn_icode (tree arg, tree fndecl)
2437 bool errno_set = false;
2438 optab builtin_optab = unknown_optab;
2439 machine_mode mode;
2441 switch (DECL_FUNCTION_CODE (fndecl))
2443 CASE_FLT_FN (BUILT_IN_ILOGB):
2444 errno_set = true; builtin_optab = ilogb_optab; break;
2445 CASE_FLT_FN (BUILT_IN_ISINF):
2446 builtin_optab = isinf_optab; break;
2447 case BUILT_IN_ISNORMAL:
2448 case BUILT_IN_ISFINITE:
2449 CASE_FLT_FN (BUILT_IN_FINITE):
2450 case BUILT_IN_FINITED32:
2451 case BUILT_IN_FINITED64:
2452 case BUILT_IN_FINITED128:
2453 case BUILT_IN_ISINFD32:
2454 case BUILT_IN_ISINFD64:
2455 case BUILT_IN_ISINFD128:
2456 /* These builtins have no optabs (yet). */
2457 break;
2458 default:
2459 gcc_unreachable ();
2462 /* There's no easy way to detect the case we need to set EDOM. */
2463 if (flag_errno_math && errno_set)
2464 return CODE_FOR_nothing;
2466 /* Optab mode depends on the mode of the input argument. */
2467 mode = TYPE_MODE (TREE_TYPE (arg));
2469 if (builtin_optab)
2470 return optab_handler (builtin_optab, mode);
2471 return CODE_FOR_nothing;
2474 /* Expand a call to one of the builtin math functions that operate on
2475 a floating-point argument and output an integer result (ilogb, isinf,
2476 isnan, etc).
2477 Return 0 if a normal call should be emitted rather than expanding the
2478 function in-line. EXP is the expression that is a call to the builtin
2479 function; if convenient, the result should be placed in TARGET. */
2481 static rtx
2482 expand_builtin_interclass_mathfn (tree exp, rtx target)
2484 enum insn_code icode = CODE_FOR_nothing;
2485 rtx op0;
2486 tree fndecl = get_callee_fndecl (exp);
2487 machine_mode mode;
2488 tree arg;
2490 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2491 return NULL_RTX;
2493 arg = CALL_EXPR_ARG (exp, 0);
2494 icode = interclass_mathfn_icode (arg, fndecl);
2495 mode = TYPE_MODE (TREE_TYPE (arg));
2497 if (icode != CODE_FOR_nothing)
2499 struct expand_operand ops[1];
2500 rtx_insn *last = get_last_insn ();
2501 tree orig_arg = arg;
2503 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2504 need to expand the argument again. This way, we will not perform
2505 side-effects more than once. */
2506 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2508 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2510 if (mode != GET_MODE (op0))
2511 op0 = convert_to_mode (mode, op0, 0);
2513 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2514 if (maybe_legitimize_operands (icode, 0, 1, ops)
2515 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2516 return ops[0].value;
2518 delete_insns_since (last);
2519 CALL_EXPR_ARG (exp, 0) = orig_arg;
2522 return NULL_RTX;
2525 /* Expand a call to the builtin sincos math function.
2526 Return NULL_RTX if a normal call should be emitted rather than expanding the
2527 function in-line. EXP is the expression that is a call to the builtin
2528 function. */
2530 static rtx
2531 expand_builtin_sincos (tree exp)
2533 rtx op0, op1, op2, target1, target2;
2534 machine_mode mode;
2535 tree arg, sinp, cosp;
2536 int result;
2537 location_t loc = EXPR_LOCATION (exp);
2538 tree alias_type, alias_off;
2540 if (!validate_arglist (exp, REAL_TYPE,
2541 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2542 return NULL_RTX;
2544 arg = CALL_EXPR_ARG (exp, 0);
2545 sinp = CALL_EXPR_ARG (exp, 1);
2546 cosp = CALL_EXPR_ARG (exp, 2);
2548 /* Make a suitable register to place result in. */
2549 mode = TYPE_MODE (TREE_TYPE (arg));
2551 /* Check if sincos insn is available, otherwise emit the call. */
2552 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2553 return NULL_RTX;
2555 target1 = gen_reg_rtx (mode);
2556 target2 = gen_reg_rtx (mode);
2558 op0 = expand_normal (arg);
2559 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2560 alias_off = build_int_cst (alias_type, 0);
2561 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 sinp, alias_off));
2563 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2564 cosp, alias_off));
2566 /* Compute into target1 and target2.
2567 Set TARGET to wherever the result comes back. */
2568 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2569 gcc_assert (result);
2571 /* Move target1 and target2 to the memory locations indicated
2572 by op1 and op2. */
2573 emit_move_insn (op1, target1);
2574 emit_move_insn (op2, target2);
2576 return const0_rtx;
2579 /* Expand a call to the internal cexpi builtin to the sincos math function.
2580 EXP is the expression that is a call to the builtin function; if convenient,
2581 the result should be placed in TARGET. */
2583 static rtx
2584 expand_builtin_cexpi (tree exp, rtx target)
2586 tree fndecl = get_callee_fndecl (exp);
2587 tree arg, type;
2588 machine_mode mode;
2589 rtx op0, op1, op2;
2590 location_t loc = EXPR_LOCATION (exp);
2592 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2593 return NULL_RTX;
2595 arg = CALL_EXPR_ARG (exp, 0);
2596 type = TREE_TYPE (arg);
2597 mode = TYPE_MODE (TREE_TYPE (arg));
2599 /* Try expanding via a sincos optab, falling back to emitting a libcall
2600 to sincos or cexp. We are sure one of those exists because cexpi is
2601 only generated from sincos or cexp, or when either of them is available. */
2602 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2604 op1 = gen_reg_rtx (mode);
2605 op2 = gen_reg_rtx (mode);
2607 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2609 /* Compute into op1 and op2. */
2610 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2612 else if (targetm.libc_has_function (function_sincos))
2614 tree call, fn = NULL_TREE;
2615 tree top1, top2;
2616 rtx op1a, op2a;
2618 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2623 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2624 else
2625 gcc_unreachable ();
2627 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2628 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2629 op1a = copy_addr_to_reg (XEXP (op1, 0));
2630 op2a = copy_addr_to_reg (XEXP (op2, 0));
2631 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2632 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2634 /* Make sure not to fold the sincos call again. */
2635 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2636 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2637 call, 3, arg, top1, top2));
2639 else
2641 tree call, fn = NULL_TREE, narg;
2642 tree ctype = build_complex_type (type);
2644 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2648 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2649 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2650 else
2651 gcc_unreachable ();
2653 /* If we don't have a decl for cexp create one. This is the
2654 friendliest fallback if the user calls __builtin_cexpi
2655 without full C99 function support on the target. */
2656 if (fn == NULL_TREE)
2658 tree fntype;
2659 const char *name = NULL;
2661 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2662 name = "cexpf";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2664 name = "cexp";
2665 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2666 name = "cexpl";
2668 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2669 fn = build_fn_decl (name, fntype);
2672 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2673 build_real (type, dconst0), arg);
2675 /* Make sure not to fold the cexp call again. */
2676 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2677 return expand_expr (build_call_nary (ctype, call, 1, narg),
2678 target, VOIDmode, EXPAND_NORMAL);
2681 /* Now build the proper return type. */
2682 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2683 make_tree (TREE_TYPE (arg), op2),
2684 make_tree (TREE_TYPE (arg), op1)),
2685 target, VOIDmode, EXPAND_NORMAL);
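/* Illustration: __builtin_cexpi (x) computes cos (x) + sin (x) * I.
   With a sincos insn both parts come from a single instruction; the
   sincos and cexp libcall fallbacks above compute the same value. */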
2688 /* Conveniently construct a function call expression. FNDECL names the
2689 function to be called, N is the number of arguments, and the "..."
2690 parameters are the argument expressions. Unlike build_call_expr
2691 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2693 static tree
2694 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2696 va_list ap;
2697 tree fntype = TREE_TYPE (fndecl);
2698 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2700 va_start (ap, n);
2701 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2702 va_end (ap);
2703 SET_EXPR_LOCATION (fn, loc);
2704 return fn;
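/* Usage sketch: build_call_nofold_loc (loc, fndecl, 2, dest, src)
   yields the CALL_EXPR fndecl (dest, src) at LOC, and it is guaranteed
   to stay a CALL_EXPR because no folding is applied. */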
2707 /* Expand a call to one of the builtin rounding functions gcc defines
2708 as an extension (lfloor and lceil). As these are gcc extensions, we
2709 do not need to worry about setting errno to EDOM.
2710 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2711 EXP is the expression that is a call to the builtin function;
2712 if convenient, the result should be placed in TARGET. */
2714 static rtx
2715 expand_builtin_int_roundingfn (tree exp, rtx target)
2717 convert_optab builtin_optab;
2718 rtx op0, tmp;
2719 rtx_insn *insns;
2720 tree fndecl = get_callee_fndecl (exp);
2721 enum built_in_function fallback_fn;
2722 tree fallback_fndecl;
2723 machine_mode mode;
2724 tree arg;
2726 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2727 gcc_unreachable ();
2729 arg = CALL_EXPR_ARG (exp, 0);
2731 switch (DECL_FUNCTION_CODE (fndecl))
2733 CASE_FLT_FN (BUILT_IN_ICEIL):
2734 CASE_FLT_FN (BUILT_IN_LCEIL):
2735 CASE_FLT_FN (BUILT_IN_LLCEIL):
2736 builtin_optab = lceil_optab;
2737 fallback_fn = BUILT_IN_CEIL;
2738 break;
2740 CASE_FLT_FN (BUILT_IN_IFLOOR):
2741 CASE_FLT_FN (BUILT_IN_LFLOOR):
2742 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2743 builtin_optab = lfloor_optab;
2744 fallback_fn = BUILT_IN_FLOOR;
2745 break;
2747 default:
2748 gcc_unreachable ();
2751 /* Make a suitable register to place result in. */
2752 mode = TYPE_MODE (TREE_TYPE (exp));
2754 target = gen_reg_rtx (mode);
2756 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2757 need to expand the argument again. This way, we will not perform
2758 side-effects more than once. */
2759 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2761 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2763 start_sequence ();
2765 /* Compute into TARGET. */
2766 if (expand_sfix_optab (target, op0, builtin_optab))
2768 /* Output the entire sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 return target;
2775 /* If we were unable to expand via the builtin, stop the sequence
2776 (without outputting the insns). */
2777 end_sequence ();
2779 /* Fall back to floating point rounding optab. */
2780 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2782 /* For non-C99 targets we may end up without a fallback fndecl here
2783 if the user called __builtin_lfloor directly. In this case emit
2784 a call to the floor/ceil variants nevertheless. This should result
2785 in the best user experience on targets without full C99 support. */
2786 if (fallback_fndecl == NULL_TREE)
2788 tree fntype;
2789 const char *name = NULL;
2791 switch (DECL_FUNCTION_CODE (fndecl))
2793 case BUILT_IN_ICEIL:
2794 case BUILT_IN_LCEIL:
2795 case BUILT_IN_LLCEIL:
2796 name = "ceil";
2797 break;
2798 case BUILT_IN_ICEILF:
2799 case BUILT_IN_LCEILF:
2800 case BUILT_IN_LLCEILF:
2801 name = "ceilf";
2802 break;
2803 case BUILT_IN_ICEILL:
2804 case BUILT_IN_LCEILL:
2805 case BUILT_IN_LLCEILL:
2806 name = "ceill";
2807 break;
2808 case BUILT_IN_IFLOOR:
2809 case BUILT_IN_LFLOOR:
2810 case BUILT_IN_LLFLOOR:
2811 name = "floor";
2812 break;
2813 case BUILT_IN_IFLOORF:
2814 case BUILT_IN_LFLOORF:
2815 case BUILT_IN_LLFLOORF:
2816 name = "floorf";
2817 break;
2818 case BUILT_IN_IFLOORL:
2819 case BUILT_IN_LFLOORL:
2820 case BUILT_IN_LLFLOORL:
2821 name = "floorl";
2822 break;
2823 default:
2824 gcc_unreachable ();
2827 fntype = build_function_type_list (TREE_TYPE (arg),
2828 TREE_TYPE (arg), NULL_TREE);
2829 fallback_fndecl = build_fn_decl (name, fntype);
2832 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2834 tmp = expand_normal (exp);
2835 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2837 /* Truncate the result of the floating-point optab to an integer
2838 via expand_fix (). */
2839 target = gen_reg_rtx (mode);
2840 expand_fix (target, tmp, 0);
2842 return target;
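/* For example, on a target without an lceil/lfloor insn,
   __builtin_lfloor (x) ends up as the fallback sketched above:
   a call to floor (x) followed by expand_fix, i.e. effectively
   (long int) floor (x). */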
2845 /* Expand a call to one of the builtin math functions doing integer
2846 conversion (lrint).
2847 Return 0 if a normal call should be emitted rather than expanding the
2848 function in-line. EXP is the expression that is a call to the builtin
2849 function; if convenient, the result should be placed in TARGET. */
2851 static rtx
2852 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2854 convert_optab builtin_optab;
2855 rtx op0;
2856 rtx_insn *insns;
2857 tree fndecl = get_callee_fndecl (exp);
2858 tree arg;
2859 machine_mode mode;
2860 enum built_in_function fallback_fn = BUILT_IN_NONE;
2862 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2863 gcc_unreachable ();
2865 arg = CALL_EXPR_ARG (exp, 0);
2867 switch (DECL_FUNCTION_CODE (fndecl))
2869 CASE_FLT_FN (BUILT_IN_IRINT):
2870 fallback_fn = BUILT_IN_LRINT;
2871 /* FALLTHRU */
2872 CASE_FLT_FN (BUILT_IN_LRINT):
2873 CASE_FLT_FN (BUILT_IN_LLRINT):
2874 builtin_optab = lrint_optab;
2875 break;
2877 CASE_FLT_FN (BUILT_IN_IROUND):
2878 fallback_fn = BUILT_IN_LROUND;
2879 /* FALLTHRU */
2880 CASE_FLT_FN (BUILT_IN_LROUND):
2881 CASE_FLT_FN (BUILT_IN_LLROUND):
2882 builtin_optab = lround_optab;
2883 break;
2885 default:
2886 gcc_unreachable ();
2889 /* There's no easy way to detect the case we need to set EDOM. */
2890 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2891 return NULL_RTX;
2893 /* Make a suitable register to place result in. */
2894 mode = TYPE_MODE (TREE_TYPE (exp));
2896 /* When errno need not be maintained, try to expand inline. */
2897 if (!flag_errno_math)
2899 rtx result = gen_reg_rtx (mode);
2901 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2902 need to expand the argument again. This way, we will not perform
2903 side-effects more than once. */
2904 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2906 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2908 start_sequence ();
2910 if (expand_sfix_optab (result, op0, builtin_optab))
2912 /* Output the entire sequence. */
2913 insns = get_insns ();
2914 end_sequence ();
2915 emit_insn (insns);
2916 return result;
2919 /* If we were unable to expand via the builtin, stop the sequence
2920 (without outputting the insns) and call the library function
2921 with the stabilized argument list. */
2922 end_sequence ();
2925 if (fallback_fn != BUILT_IN_NONE)
2927 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2928 targets, (int) round (x) should never be transformed into
2929 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2930 a call to lround in the hope that the target provides at least some
2931 C99 functions. This should result in the best user experience on
2932 targets without full C99 support. */
2933 tree fallback_fndecl = mathfn_built_in_1
2934 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2936 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2937 fallback_fndecl, 1, arg);
2939 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2940 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2941 return convert_to_mode (mode, target, 0);
2944 return expand_call (exp, target, target == const0_rtx);
2947 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2948 a normal call should be emitted rather than expanding the function
2949 in-line. EXP is the expression that is a call to the builtin
2950 function; if convenient, the result should be placed in TARGET. */
2952 static rtx
2953 expand_builtin_powi (tree exp, rtx target)
2955 tree arg0, arg1;
2956 rtx op0, op1;
2957 machine_mode mode;
2958 machine_mode mode2;
2960 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2961 return NULL_RTX;
2963 arg0 = CALL_EXPR_ARG (exp, 0);
2964 arg1 = CALL_EXPR_ARG (exp, 1);
2965 mode = TYPE_MODE (TREE_TYPE (exp));
2967 /* Emit a libcall to libgcc. */
2969 /* Mode of the 2nd argument must match that of an int. */
2970 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2972 if (target == NULL_RTX)
2973 target = gen_reg_rtx (mode);
2975 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2976 if (GET_MODE (op0) != mode)
2977 op0 = convert_to_mode (mode, op0, 0);
2978 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2979 if (GET_MODE (op1) != mode2)
2980 op1 = convert_to_mode (mode2, op1, 0);
2982 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2983 target, LCT_CONST, mode, 2,
2984 op0, mode, op1, mode2);
2986 return target;
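/* A sketch of the result: __builtin_powi (x, n) becomes a libcall such
   as __powidf2 (x, n) for DFmode; the exact symbol is whatever
   optab_libfunc (powi_optab, mode) returns for the target. */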
2989 /* Expand expression EXP which is a call to the strlen builtin. Return
2990 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2991 try to get the result in TARGET, if convenient. */
2993 static rtx
2994 expand_builtin_strlen (tree exp, rtx target,
2995 machine_mode target_mode)
2997 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2998 return NULL_RTX;
2999 else
3001 struct expand_operand ops[4];
3002 rtx pat;
3003 tree len;
3004 tree src = CALL_EXPR_ARG (exp, 0);
3005 rtx src_reg;
3006 rtx_insn *before_strlen;
3007 machine_mode insn_mode = target_mode;
3008 enum insn_code icode = CODE_FOR_nothing;
3009 unsigned int align;
3011 /* If the length can be computed at compile-time, return it. */
3012 len = c_strlen (src, 0);
3013 if (len)
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 /* If the length can be computed at compile-time and is a constant
3017 integer, but there are side-effects in src, evaluate
3018 src for side-effects, then return len.
3019 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3020 can be optimized into: i++; x = 3; */
3021 len = c_strlen (src, 1);
3022 if (len && TREE_CODE (len) == INTEGER_CST)
3024 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3025 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3028 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3030 /* If SRC is not a pointer type, don't do this operation inline. */
3031 if (align == 0)
3032 return NULL_RTX;
3034 /* Bail out if we can't compute strlen in the right mode. */
3035 while (insn_mode != VOIDmode)
3037 icode = optab_handler (strlen_optab, insn_mode);
3038 if (icode != CODE_FOR_nothing)
3039 break;
3041 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3043 if (insn_mode == VOIDmode)
3044 return NULL_RTX;
3046 /* Make a place to hold the source address. We will not expand
3047 the actual source until we are sure that the expansion will
3048 not fail -- there are trees that cannot be expanded twice. */
3049 src_reg = gen_reg_rtx (Pmode);
3051 /* Mark the beginning of the strlen sequence so we can emit the
3052 source operand later. */
3053 before_strlen = get_last_insn ();
3055 create_output_operand (&ops[0], target, insn_mode);
3056 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3057 create_integer_operand (&ops[2], 0);
3058 create_integer_operand (&ops[3], align);
3059 if (!maybe_expand_insn (icode, 4, ops))
3060 return NULL_RTX;
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3074 pat = get_insns ();
3075 end_sequence ();
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3090 return target;
3094 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3095 bytes from constant string DATA + OFFSET and return it as a target
3096 constant. */
3098 static rtx
3099 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3100 machine_mode mode)
3102 const char *str = (const char *) data;
3104 gcc_assert (offset >= 0
3105 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3106 <= strlen (str) + 1));
3108 return c_readstr (str + offset, mode);
3111 /* LEN specifies the length of the block of the memcpy/memset operation.
3112 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3113 In some cases we can make a very likely guess at the max size, which
3114 we then set into PROBABLE_MAX_SIZE. */
3116 static void
3117 determine_block_size (tree len, rtx len_rtx,
3118 unsigned HOST_WIDE_INT *min_size,
3119 unsigned HOST_WIDE_INT *max_size,
3120 unsigned HOST_WIDE_INT *probable_max_size)
3122 if (CONST_INT_P (len_rtx))
3124 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3125 return;
3127 else
3129 wide_int min, max;
3130 enum value_range_type range_type = VR_UNDEFINED;
3132 /* Determine bounds from the type. */
3133 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3134 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3135 else
3136 *min_size = 0;
3137 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3138 *probable_max_size = *max_size
3139 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3140 else
3141 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3143 if (TREE_CODE (len) == SSA_NAME)
3144 range_type = get_range_info (len, &min, &max);
3145 if (range_type == VR_RANGE)
3147 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3148 *min_size = min.to_uhwi ();
3149 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3150 *probable_max_size = *max_size = max.to_uhwi ();
3152 else if (range_type == VR_ANTI_RANGE)
3154 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
3155 if (min == 0)
3157 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3158 *min_size = max.to_uhwi () + 1;
3160 /* Code like
3162 int n;
3163 if (n < 100)
3164 memcpy (a, b, n)
3166 produces an anti range allowing negative values of N. We can
3167 still use that information and guess that N is not negative. */
3169 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3170 *probable_max_size = min.to_uhwi () - 1;
3173 gcc_checking_assert (*max_size <=
3174 (unsigned HOST_WIDE_INT)
3175 GET_MODE_MASK (GET_MODE (len_rtx)));
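/* Example: for memcpy (a, b, 32) the CONST_INT path above yields
   *MIN_SIZE == *MAX_SIZE == *PROBABLE_MAX_SIZE == 32, while a size_t
   length that range info has bounded to [1, 100] yields *MIN_SIZE == 1
   and *MAX_SIZE == *PROBABLE_MAX_SIZE == 100. */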
3178 /* Helper function to do the actual work for expand_builtin_memcpy. */
3180 static rtx
3181 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3183 const char *src_str;
3184 unsigned int src_align = get_pointer_alignment (src);
3185 unsigned int dest_align = get_pointer_alignment (dest);
3186 rtx dest_mem, src_mem, dest_addr, len_rtx;
3187 HOST_WIDE_INT expected_size = -1;
3188 unsigned int expected_align = 0;
3189 unsigned HOST_WIDE_INT min_size;
3190 unsigned HOST_WIDE_INT max_size;
3191 unsigned HOST_WIDE_INT probable_max_size;
3193 /* If DEST is not a pointer type, call the normal function. */
3194 if (dest_align == 0)
3195 return NULL_RTX;
3197 /* Likewise, if SRC is not a pointer type, don't do this
3198 operation in-line. */
3199 if (src_align == 0)
3200 return NULL_RTX;
3202 if (currently_expanding_gimple_stmt)
3203 stringop_block_profile (currently_expanding_gimple_stmt,
3204 &expected_align, &expected_size);
3206 if (expected_align < dest_align)
3207 expected_align = dest_align;
3208 dest_mem = get_memory_rtx (dest, len);
3209 set_mem_align (dest_mem, dest_align);
3210 len_rtx = expand_normal (len);
3211 determine_block_size (len, len_rtx, &min_size, &max_size,
3212 &probable_max_size);
3213 src_str = c_getstr (src);
3215 /* If SRC is a string constant and block move would be done
3216 by pieces, we can avoid loading the string from memory
3217 and only store the computed constants. */
3218 if (src_str
3219 && CONST_INT_P (len_rtx)
3220 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3221 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3222 CONST_CAST (char *, src_str),
3223 dest_align, false))
3225 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3226 builtin_memcpy_read_str,
3227 CONST_CAST (char *, src_str),
3228 dest_align, false, 0);
3229 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3230 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3231 return dest_mem;
3234 src_mem = get_memory_rtx (src, len);
3235 set_mem_align (src_mem, src_align);
3237 /* Copy word part most expediently. */
3238 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3239 CALL_EXPR_TAILCALL (exp)
3240 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3241 expected_align, expected_size,
3242 min_size, max_size, probable_max_size);
3244 if (dest_addr == 0)
3246 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3247 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3250 return dest_addr;
3253 /* Expand a call EXP to the memcpy builtin.
3254 Return NULL_RTX if we failed; the caller should emit a normal call,
3255 otherwise try to get the result in TARGET, if convenient (and in
3256 mode MODE if that's convenient). */
3258 static rtx
3259 expand_builtin_memcpy (tree exp, rtx target)
3261 if (!validate_arglist (exp,
3262 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3263 return NULL_RTX;
3264 else
3266 tree dest = CALL_EXPR_ARG (exp, 0);
3267 tree src = CALL_EXPR_ARG (exp, 1);
3268 tree len = CALL_EXPR_ARG (exp, 2);
3269 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3273 /* Expand an instrumented call EXP to the memcpy builtin.
3274 Return NULL_RTX if we failed; the caller should emit a normal call,
3275 otherwise try to get the result in TARGET, if convenient (and in
3276 mode MODE if that's convenient). */
3278 static rtx
3279 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3281 if (!validate_arglist (exp,
3282 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3283 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3284 INTEGER_TYPE, VOID_TYPE))
3285 return NULL_RTX;
3286 else
3288 tree dest = CALL_EXPR_ARG (exp, 0);
3289 tree src = CALL_EXPR_ARG (exp, 2);
3290 tree len = CALL_EXPR_ARG (exp, 4);
3291 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3293 /* Return src bounds with the result. */
3294 if (res)
3296 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3297 expand_normal (CALL_EXPR_ARG (exp, 1)));
3298 res = chkp_join_splitted_slot (res, bnd);
3300 return res;
3304 /* Expand a call EXP to the mempcpy builtin.
3305 Return NULL_RTX if we failed; the caller should emit a normal call,
3306 otherwise try to get the result in TARGET, if convenient (and in
3307 mode MODE if that's convenient). If ENDP is 0 return the
3308 destination pointer, if ENDP is 1 return the end pointer ala
3309 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3310 stpcpy. */
3312 static rtx
3313 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3315 if (!validate_arglist (exp,
3316 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3317 return NULL_RTX;
3318 else
3320 tree dest = CALL_EXPR_ARG (exp, 0);
3321 tree src = CALL_EXPR_ARG (exp, 1);
3322 tree len = CALL_EXPR_ARG (exp, 2);
3323 return expand_builtin_mempcpy_args (dest, src, len,
3324 target, mode, /*endp=*/ 1,
3325 exp);
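/* Illustration of the ENDP convention: copying the four bytes of "abc"
   (including the NUL) into BUF returns BUF itself for ENDP == 0,
   BUF + 4 for ENDP == 1 (as mempcpy would), and BUF + 3, the address
   of the NUL, for ENDP == 2 (as stpcpy would). */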
3329 /* Expand an instrumented call EXP to the mempcpy builtin.
3330 Return NULL_RTX if we failed; the caller should emit a normal call,
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). */
3334 static rtx
3335 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3337 if (!validate_arglist (exp,
3338 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 INTEGER_TYPE, VOID_TYPE))
3341 return NULL_RTX;
3342 else
3344 tree dest = CALL_EXPR_ARG (exp, 0);
3345 tree src = CALL_EXPR_ARG (exp, 2);
3346 tree len = CALL_EXPR_ARG (exp, 4);
3347 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3348 mode, 1, exp);
3350 /* Return src bounds with the result. */
3351 if (res)
3353 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3354 expand_normal (CALL_EXPR_ARG (exp, 1)));
3355 res = chkp_join_splitted_slot (res, bnd);
3357 return res;
3361 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3362 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3363 so that this can also be called without constructing an actual CALL_EXPR.
3364 The other arguments and return value are the same as for
3365 expand_builtin_mempcpy. */
3367 static rtx
3368 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3369 rtx target, machine_mode mode, int endp,
3370 tree orig_exp)
3372 tree fndecl = get_callee_fndecl (orig_exp);
3374 /* If return value is ignored, transform mempcpy into memcpy. */
3375 if (target == const0_rtx
3376 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3377 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3379 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3380 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3381 dest, src, len);
3382 return expand_expr (result, target, mode, EXPAND_NORMAL);
3384 else if (target == const0_rtx
3385 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3387 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3388 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3389 dest, src, len);
3390 return expand_expr (result, target, mode, EXPAND_NORMAL);
3392 else
3394 const char *src_str;
3395 unsigned int src_align = get_pointer_alignment (src);
3396 unsigned int dest_align = get_pointer_alignment (dest);
3397 rtx dest_mem, src_mem, len_rtx;
3399 /* If either SRC or DEST is not a pointer type, don't do this
3400 operation in-line. */
3401 if (dest_align == 0 || src_align == 0)
3402 return NULL_RTX;
3404 /* If LEN is not constant, call the normal function. */
3405 if (! tree_fits_uhwi_p (len))
3406 return NULL_RTX;
3408 len_rtx = expand_normal (len);
3409 src_str = c_getstr (src);
3411 /* If SRC is a string constant and block move would be done
3412 by pieces, we can avoid loading the string from memory
3413 and only store the computed constants. */
3414 if (src_str
3415 && CONST_INT_P (len_rtx)
3416 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3417 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3418 CONST_CAST (char *, src_str),
3419 dest_align, false))
3421 dest_mem = get_memory_rtx (dest, len);
3422 set_mem_align (dest_mem, dest_align);
3423 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3424 builtin_memcpy_read_str,
3425 CONST_CAST (char *, src_str),
3426 dest_align, false, endp);
3427 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3428 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3429 return dest_mem;
3432 if (CONST_INT_P (len_rtx)
3433 && can_move_by_pieces (INTVAL (len_rtx),
3434 MIN (dest_align, src_align)))
3436 dest_mem = get_memory_rtx (dest, len);
3437 set_mem_align (dest_mem, dest_align);
3438 src_mem = get_memory_rtx (src, len);
3439 set_mem_align (src_mem, src_align);
3440 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3441 MIN (dest_align, src_align), endp);
3442 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3443 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3444 return dest_mem;
3447 return NULL_RTX;
3451 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3452 we failed; the caller should emit a normal call, otherwise try to
3453 get the result in TARGET, if convenient. If ENDP is 0 return the
3454 destination pointer, if ENDP is 1 return the end pointer ala
3455 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3456 stpcpy. */
3458 static rtx
3459 expand_movstr (tree dest, tree src, rtx target, int endp)
3461 struct expand_operand ops[3];
3462 rtx dest_mem;
3463 rtx src_mem;
3465 if (!targetm.have_movstr ())
3466 return NULL_RTX;
3468 dest_mem = get_memory_rtx (dest, NULL);
3469 src_mem = get_memory_rtx (src, NULL);
3470 if (!endp)
3472 target = force_reg (Pmode, XEXP (dest_mem, 0));
3473 dest_mem = replace_equiv_address (dest_mem, target);
3476 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3477 create_fixed_operand (&ops[1], dest_mem);
3478 create_fixed_operand (&ops[2], src_mem);
3479 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3480 return NULL_RTX;
3482 if (endp && target != const0_rtx)
3484 target = ops[0].value;
3485 /* movstr is supposed to set end to the address of the NUL
3486 terminator. If the caller requested a mempcpy-like return value,
3487 adjust it. */
3488 if (endp == 1)
3490 rtx tem = plus_constant (GET_MODE (target),
3491 gen_lowpart (GET_MODE (target), target), 1);
3492 emit_move_insn (target, force_operand (tem, NULL_RTX));
3495 return target;
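/* Illustrative sketch, not part of GCC: a portable model of the ENDP
   convention documented above.  The helper name is hypothetical; it only
   shows which pointer each ENDP value denotes after copying LEN bytes
   (for string copies LEN includes the NUL).  */
static char *
endp_result_model (char *dest, const char *src, size_t len, int endp)
{
  memcpy (dest, src, len);
  if (endp == 0)
    return dest;		/* strcpy/memcpy: the destination.  */
  else if (endp == 1)
    return dest + len;		/* mempcpy: one past the last byte.  */
  else
    return dest + len - 1;	/* stpcpy: the NUL terminator.  */
}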
3498 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3499 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3500 try to get the result in TARGET, if convenient (and in mode MODE if that's
3501 convenient). */
3503 static rtx
3504 expand_builtin_strcpy (tree exp, rtx target)
3506 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3508 tree dest = CALL_EXPR_ARG (exp, 0);
3509 tree src = CALL_EXPR_ARG (exp, 1);
3510 return expand_builtin_strcpy_args (dest, src, target);
3512 return NULL_RTX;
3515 /* Helper function to do the actual work for expand_builtin_strcpy. The
3516 arguments to the builtin_strcpy call DEST and SRC are broken out
3517 so that this can also be called without constructing an actual CALL_EXPR.
3518 The other arguments and return value are the same as for
3519 expand_builtin_strcpy. */
3521 static rtx
3522 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3524 return expand_movstr (dest, src, target, /*endp=*/0);
3527 /* Expand a call EXP to the stpcpy builtin.
3528 Return NULL_RTX if we failed; the caller should then emit a normal call.
3529 Otherwise try to get the result in TARGET, if convenient (and in
3530 mode MODE if that's convenient). */
3532 static rtx
3533 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3535 tree dst, src;
3536 location_t loc = EXPR_LOCATION (exp);
3538 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3539 return NULL_RTX;
3541 dst = CALL_EXPR_ARG (exp, 0);
3542 src = CALL_EXPR_ARG (exp, 1);
3544 /* If return value is ignored, transform stpcpy into strcpy. */
3545 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3547 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3548 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3549 return expand_expr (result, target, mode, EXPAND_NORMAL);
3551 else
3553 tree len, lenp1;
3554 rtx ret;
3556 /* Ensure we get an actual string whose length can be evaluated at
3557 compile-time, not an expression containing a string. This is
3558 because the latter will potentially produce pessimized code
3559 when used to produce the return value. */
3560 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3561 return expand_movstr (dst, src, target, /*endp=*/2);
3563 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3564 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3565 target, mode, /*endp=*/2,
3566 exp);
3568 if (ret)
3569 return ret;
3571 if (TREE_CODE (len) == INTEGER_CST)
3573 rtx len_rtx = expand_normal (len);
3575 if (CONST_INT_P (len_rtx))
3577 ret = expand_builtin_strcpy_args (dst, src, target);
3579 if (ret)
3581 if (! target)
3583 if (mode != VOIDmode)
3584 target = gen_reg_rtx (mode);
3585 else
3586 target = gen_reg_rtx (GET_MODE (ret));
3588 if (GET_MODE (target) != GET_MODE (ret))
3589 ret = gen_lowpart (GET_MODE (target), ret);
3591 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3592 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3593 gcc_assert (ret);
3595 return target;
3600 return expand_movstr (dst, src, target, /*endp=*/2);
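/* Illustrative sketch, not part of GCC: the rewrite performed above at the
   source level, assuming the GNU libc mempcpy.  When SRC is the literal
   "abc", c_strlen gives 3 at compile time, so LEN + 1 is 4.  */
static char *
stpcpy_rewrite_model (char *buf)
{
  /* What the expander emits for: return stpcpy (buf, "abc");  */
  return (char *) mempcpy (buf, "abc", 4) - 1;	/* back up to the NUL */
}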
3604 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3605 bytes from constant string DATA + OFFSET and return it as target
3606 constant. */
3608 rtx
3609 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3610 machine_mode mode)
3612 const char *str = (const char *) data;
3614 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3615 return const0_rtx;
3617 return c_readstr (str + offset, mode);
3620 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3621 NULL_RTX if we failed; the caller should then emit a normal call. */
3623 static rtx
3624 expand_builtin_strncpy (tree exp, rtx target)
3626 location_t loc = EXPR_LOCATION (exp);
3628 if (validate_arglist (exp,
3629 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3631 tree dest = CALL_EXPR_ARG (exp, 0);
3632 tree src = CALL_EXPR_ARG (exp, 1);
3633 tree len = CALL_EXPR_ARG (exp, 2);
3634 tree slen = c_strlen (src, 1);
3636 /* We must be passed a constant len and src parameter. */
3637 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3638 return NULL_RTX;
3640 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3642 /* We're required to pad with trailing zeros if the requested
3643 len is greater than strlen(s2)+1. In that case try to
3644 use store_by_pieces; if it fails, punt.
3645 if (tree_int_cst_lt (slen, len))
3647 unsigned int dest_align = get_pointer_alignment (dest);
3648 const char *p = c_getstr (src);
3649 rtx dest_mem;
3651 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3652 || !can_store_by_pieces (tree_to_uhwi (len),
3653 builtin_strncpy_read_str,
3654 CONST_CAST (char *, p),
3655 dest_align, false))
3656 return NULL_RTX;
3658 dest_mem = get_memory_rtx (dest, len);
3659 store_by_pieces (dest_mem, tree_to_uhwi (len),
3660 builtin_strncpy_read_str,
3661 CONST_CAST (char *, p), dest_align, false, 0);
3662 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3663 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3664 return dest_mem;
3667 return NULL_RTX;
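/* Illustrative sketch, not part of GCC: the padding rule handled above.
   The helper name is hypothetical; it shows the zero fill that
   store_by_pieces must reproduce when LEN exceeds strlen (SRC) + 1.  */
static void
strncpy_padding_model (char *dest, const char *src, size_t len)
{
  size_t n = strlen (src);
  for (size_t i = 0; i < len; i++)
    dest[i] = i < n ? src[i] : 0;	/* zeros past the terminator */
}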
3670 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3671 bytes from constant string DATA + OFFSET and return it as target
3672 constant. */
3674 rtx
3675 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3676 machine_mode mode)
3678 const char *c = (const char *) data;
3679 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3681 memset (p, *c, GET_MODE_SIZE (mode));
3683 return c_readstr (p, mode);
3686 /* Callback routine for store_by_pieces. Return the RTL of a register
3687 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3688 char value given in the RTL register data. For example, if mode is
3689 4 bytes wide, return the RTL for 0x01010101*data. */
3691 static rtx
3692 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3693 machine_mode mode)
3695 rtx target, coeff;
3696 size_t size;
3697 char *p;
3699 size = GET_MODE_SIZE (mode);
3700 if (size == 1)
3701 return (rtx) data;
3703 p = XALLOCAVEC (char, size);
3704 memset (p, 1, size);
3705 coeff = c_readstr (p, mode);
3707 target = convert_to_mode (mode, (rtx) data, 1);
3708 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3709 return force_reg (mode, target);
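/* Illustrative sketch, not part of GCC: the coefficient trick used above,
   shown for a 4-byte mode with a 32-bit unsigned int.  Multiplying the
   byte by 0x01010101 replicates it into every byte lane.  */
static unsigned int
replicate_byte_model (unsigned char c)
{
  return (unsigned int) c * 0x01010101u;	/* 0xAB -> 0xABABABAB */
}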
3712 /* Expand expression EXP, which is a call to the memset builtin. Return
3713 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3714 try to get the result in TARGET, if convenient (and in mode MODE if that's
3715 convenient). */
3717 static rtx
3718 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3720 if (!validate_arglist (exp,
3721 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3722 return NULL_RTX;
3723 else
3725 tree dest = CALL_EXPR_ARG (exp, 0);
3726 tree val = CALL_EXPR_ARG (exp, 1);
3727 tree len = CALL_EXPR_ARG (exp, 2);
3728 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3732 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3733 Return NULL_RTX if we failed; the caller should then emit a normal call. Otherwise
3734 try to get the result in TARGET, if convenient (and in mode MODE if that's
3735 convenient). */
3737 static rtx
3738 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3740 if (!validate_arglist (exp,
3741 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3742 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3743 return NULL_RTX;
3744 else
3746 tree dest = CALL_EXPR_ARG (exp, 0);
3747 tree val = CALL_EXPR_ARG (exp, 2);
3748 tree len = CALL_EXPR_ARG (exp, 3);
3749 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3751 /* Return src bounds with the result. */
3752 if (res)
3754 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3755 expand_normal (CALL_EXPR_ARG (exp, 1)));
3756 res = chkp_join_splitted_slot (res, bnd);
3758 return res;
3762 /* Helper function to do the actual work for expand_builtin_memset. The
3763 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3764 so that this can also be called without constructing an actual CALL_EXPR.
3765 The other arguments and return value are the same as for
3766 expand_builtin_memset. */
3768 static rtx
3769 expand_builtin_memset_args (tree dest, tree val, tree len,
3770 rtx target, machine_mode mode, tree orig_exp)
3772 tree fndecl, fn;
3773 enum built_in_function fcode;
3774 machine_mode val_mode;
3775 char c;
3776 unsigned int dest_align;
3777 rtx dest_mem, dest_addr, len_rtx;
3778 HOST_WIDE_INT expected_size = -1;
3779 unsigned int expected_align = 0;
3780 unsigned HOST_WIDE_INT min_size;
3781 unsigned HOST_WIDE_INT max_size;
3782 unsigned HOST_WIDE_INT probable_max_size;
3784 dest_align = get_pointer_alignment (dest);
3786 /* If DEST is not a pointer type, don't do this operation in-line. */
3787 if (dest_align == 0)
3788 return NULL_RTX;
3790 if (currently_expanding_gimple_stmt)
3791 stringop_block_profile (currently_expanding_gimple_stmt,
3792 &expected_align, &expected_size);
3794 if (expected_align < dest_align)
3795 expected_align = dest_align;
3797 /* If the LEN parameter is zero, return DEST. */
3798 if (integer_zerop (len))
3800 /* Evaluate and ignore VAL in case it has side-effects. */
3801 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3802 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3805 /* Stabilize the arguments in case we fail. */
3806 dest = builtin_save_expr (dest);
3807 val = builtin_save_expr (val);
3808 len = builtin_save_expr (len);
3810 len_rtx = expand_normal (len);
3811 determine_block_size (len, len_rtx, &min_size, &max_size,
3812 &probable_max_size);
3813 dest_mem = get_memory_rtx (dest, len);
3814 val_mode = TYPE_MODE (unsigned_char_type_node);
3816 if (TREE_CODE (val) != INTEGER_CST)
3818 rtx val_rtx;
3820 val_rtx = expand_normal (val);
3821 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3823 /* Assume that we can memset by pieces if we can store
3824 the coefficients by pieces (in the required modes).
3825 We can't pass builtin_memset_gen_str as that emits RTL. */
3826 c = 1;
3827 if (tree_fits_uhwi_p (len)
3828 && can_store_by_pieces (tree_to_uhwi (len),
3829 builtin_memset_read_str, &c, dest_align,
3830 true))
3832 val_rtx = force_reg (val_mode, val_rtx);
3833 store_by_pieces (dest_mem, tree_to_uhwi (len),
3834 builtin_memset_gen_str, val_rtx, dest_align,
3835 true, 0);
3837 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3838 dest_align, expected_align,
3839 expected_size, min_size, max_size,
3840 probable_max_size))
3841 goto do_libcall;
3843 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3844 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3845 return dest_mem;
3848 if (target_char_cast (val, &c))
3849 goto do_libcall;
3851 if (c)
3853 if (tree_fits_uhwi_p (len)
3854 && can_store_by_pieces (tree_to_uhwi (len),
3855 builtin_memset_read_str, &c, dest_align,
3856 true))
3857 store_by_pieces (dest_mem, tree_to_uhwi (len),
3858 builtin_memset_read_str, &c, dest_align, true, 0);
3859 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3860 gen_int_mode (c, val_mode),
3861 dest_align, expected_align,
3862 expected_size, min_size, max_size,
3863 probable_max_size))
3864 goto do_libcall;
3866 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3867 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3868 return dest_mem;
3871 set_mem_align (dest_mem, dest_align);
3872 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3873 CALL_EXPR_TAILCALL (orig_exp)
3874 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3875 expected_align, expected_size,
3876 min_size, max_size,
3877 probable_max_size);
3879 if (dest_addr == 0)
3881 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3882 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3885 return dest_addr;
3887 do_libcall:
3888 fndecl = get_callee_fndecl (orig_exp);
3889 fcode = DECL_FUNCTION_CODE (fndecl);
3890 if (fcode == BUILT_IN_MEMSET
3891 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3892 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3893 dest, val, len);
3894 else if (fcode == BUILT_IN_BZERO)
3895 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3896 dest, len);
3897 else
3898 gcc_unreachable ();
3899 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3900 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3901 return expand_call (fn, target, target == const0_rtx);
3904 /* Expand expression EXP, which is a call to the bzero builtin. Return
3905 NULL_RTX if we failed; the caller should then emit a normal call. */
3907 static rtx
3908 expand_builtin_bzero (tree exp)
3910 tree dest, size;
3911 location_t loc = EXPR_LOCATION (exp);
3913 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3914 return NULL_RTX;
3916 dest = CALL_EXPR_ARG (exp, 0);
3917 size = CALL_EXPR_ARG (exp, 1);
3919 /* New argument list transforming bzero(ptr x, int y) to
3920 memset(ptr x, int 0, size_t y). This is done this way
3921 so that if it isn't expanded inline, we fall back to
3922 calling bzero instead of memset. */
3924 return expand_builtin_memset_args (dest, integer_zero_node,
3925 fold_convert_loc (loc,
3926 size_type_node, size),
3927 const0_rtx, VOIDmode, exp);
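/* Illustrative sketch, not part of GCC: the argument rewrite above at the
   source level.  The helper name is hypothetical.  */
static void
bzero_rewrite_model (void *p, unsigned long n)
{
  /* bzero (p, n) is expanded as if the user had written:  */
  memset (p, 0, (size_t) n);
}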
3930 /* Try to expand cmpstr operation ICODE with the given operands.
3931 Return the result rtx on success, otherwise return null. */
3933 static rtx
3934 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3935 HOST_WIDE_INT align)
3937 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3939 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3940 target = NULL_RTX;
3942 struct expand_operand ops[4];
3943 create_output_operand (&ops[0], target, insn_mode);
3944 create_fixed_operand (&ops[1], arg1_rtx);
3945 create_fixed_operand (&ops[2], arg2_rtx);
3946 create_integer_operand (&ops[3], align);
3947 if (maybe_expand_insn (icode, 4, ops))
3948 return ops[0].value;
3949 return NULL_RTX;
3952 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3953 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3954 otherwise return null. */
3956 static rtx
3957 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3958 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3959 HOST_WIDE_INT align)
3961 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3963 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3964 target = NULL_RTX;
3966 struct expand_operand ops[5];
3967 create_output_operand (&ops[0], target, insn_mode);
3968 create_fixed_operand (&ops[1], arg1_rtx);
3969 create_fixed_operand (&ops[2], arg2_rtx);
3970 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3971 TYPE_UNSIGNED (arg3_type));
3972 create_integer_operand (&ops[4], align);
3973 if (maybe_expand_insn (icode, 5, ops))
3974 return ops[0].value;
3975 return NULL_RTX;
3978 /* Expand expression EXP, which is a call to the memcmp built-in function.
3979 Return NULL_RTX if we failed and the caller should emit a normal call,
3980 otherwise try to get the result in TARGET, if convenient. */
3982 static rtx
3983 expand_builtin_memcmp (tree exp, rtx target)
3985 if (!validate_arglist (exp,
3986 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3987 return NULL_RTX;
3989 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3990 implementing memcmp because it will stop if it encounters two
3991 zero bytes. */
3992 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3993 if (icode == CODE_FOR_nothing)
3994 return NULL_RTX;
3996 tree arg1 = CALL_EXPR_ARG (exp, 0);
3997 tree arg2 = CALL_EXPR_ARG (exp, 1);
3998 tree len = CALL_EXPR_ARG (exp, 2);
4000 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4001 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4003 /* If we don't have POINTER_TYPE, call the function. */
4004 if (arg1_align == 0 || arg2_align == 0)
4005 return NULL_RTX;
4007 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4008 location_t loc = EXPR_LOCATION (exp);
4009 rtx arg1_rtx = get_memory_rtx (arg1, len);
4010 rtx arg2_rtx = get_memory_rtx (arg2, len);
4011 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4013 /* Set MEM_SIZE as appropriate. */
4014 if (CONST_INT_P (arg3_rtx))
4016 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4017 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4020 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4021 TREE_TYPE (len), arg3_rtx,
4022 MIN (arg1_align, arg2_align));
4023 if (result)
4025 /* Return the value in the proper mode for this function. */
4026 if (GET_MODE (result) == mode)
4027 return result;
4029 if (target != 0)
4031 convert_move (target, result, 0);
4032 return target;
4035 return convert_to_mode (mode, result, 0);
4038 result = target;
4039 if (! (result != 0
4040 && REG_P (result) && GET_MODE (result) == mode
4041 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4042 result = gen_reg_rtx (mode);
4044 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4045 TYPE_MODE (integer_type_node), 3,
4046 XEXP (arg1_rtx, 0), Pmode,
4047 XEXP (arg2_rtx, 0), Pmode,
4048 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4049 TYPE_UNSIGNED (sizetype)),
4050 TYPE_MODE (sizetype));
4051 return result;
4054 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4055 if we failed; the caller should then emit a normal call. Otherwise try to get
4056 the result in TARGET, if convenient. */
4058 static rtx
4059 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4061 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4062 return NULL_RTX;
4064 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4065 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4066 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4068 rtx arg1_rtx, arg2_rtx;
4069 tree fndecl, fn;
4070 tree arg1 = CALL_EXPR_ARG (exp, 0);
4071 tree arg2 = CALL_EXPR_ARG (exp, 1);
4072 rtx result = NULL_RTX;
4074 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4075 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4077 /* If we don't have POINTER_TYPE, call the function. */
4078 if (arg1_align == 0 || arg2_align == 0)
4079 return NULL_RTX;
4081 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4082 arg1 = builtin_save_expr (arg1);
4083 arg2 = builtin_save_expr (arg2);
4085 arg1_rtx = get_memory_rtx (arg1, NULL);
4086 arg2_rtx = get_memory_rtx (arg2, NULL);
4088 /* Try to call cmpstrsi. */
4089 if (cmpstr_icode != CODE_FOR_nothing)
4090 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4091 MIN (arg1_align, arg2_align));
4093 /* Try to determine at least one length and call cmpstrnsi. */
4094 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4096 tree len;
4097 rtx arg3_rtx;
4099 tree len1 = c_strlen (arg1, 1);
4100 tree len2 = c_strlen (arg2, 1);
4102 if (len1)
4103 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4104 if (len2)
4105 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4107 /* If we don't have a constant length for the first, use the length
4108 of the second, if we know it. We don't require a constant for
4109 this case; some cost analysis could be done if both are available
4110 but neither is constant. For now, assume they're equally cheap,
4111 unless one has side effects. If both strings have constant lengths,
4112 use the smaller. */
4114 if (!len1)
4115 len = len2;
4116 else if (!len2)
4117 len = len1;
4118 else if (TREE_SIDE_EFFECTS (len1))
4119 len = len2;
4120 else if (TREE_SIDE_EFFECTS (len2))
4121 len = len1;
4122 else if (TREE_CODE (len1) != INTEGER_CST)
4123 len = len2;
4124 else if (TREE_CODE (len2) != INTEGER_CST)
4125 len = len1;
4126 else if (tree_int_cst_lt (len1, len2))
4127 len = len1;
4128 else
4129 len = len2;
4131 /* If both arguments have side effects, we cannot optimize. */
4132 if (len && !TREE_SIDE_EFFECTS (len))
4134 arg3_rtx = expand_normal (len);
4135 result = expand_cmpstrn_or_cmpmem
4136 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4137 arg3_rtx, MIN (arg1_align, arg2_align));
4141 if (result)
4143 /* Return the value in the proper mode for this function. */
4144 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4145 if (GET_MODE (result) == mode)
4146 return result;
4147 if (target == 0)
4148 return convert_to_mode (mode, result, 0);
4149 convert_move (target, result, 0);
4150 return target;
4153 /* Expand the library call ourselves using a stabilized argument
4154 list to avoid re-evaluating the function's arguments twice. */
4155 fndecl = get_callee_fndecl (exp);
4156 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4157 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4158 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4159 return expand_call (fn, target, target == const0_rtx);
4161 return NULL_RTX;
4164 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4165 NULL_RTX if we failed; the caller should then emit a normal call. Otherwise try to get
4166 the result in TARGET, if convenient. */
4168 static rtx
4169 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4170 ATTRIBUTE_UNUSED machine_mode mode)
4172 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4174 if (!validate_arglist (exp,
4175 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4176 return NULL_RTX;
4178 /* If c_strlen can determine an expression for one of the string
4179 lengths, and it doesn't have side effects, then emit cmpstrnsi
4180 using length MIN(strlen(string)+1, arg3). */
4181 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4182 if (cmpstrn_icode != CODE_FOR_nothing)
4184 tree len, len1, len2;
4185 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4186 rtx result;
4187 tree fndecl, fn;
4188 tree arg1 = CALL_EXPR_ARG (exp, 0);
4189 tree arg2 = CALL_EXPR_ARG (exp, 1);
4190 tree arg3 = CALL_EXPR_ARG (exp, 2);
4192 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4193 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4195 len1 = c_strlen (arg1, 1);
4196 len2 = c_strlen (arg2, 1);
4198 if (len1)
4199 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4200 if (len2)
4201 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4203 /* If we don't have a constant length for the first, use the length
4204 of the second, if we know it. We don't require a constant for
4205 this case; some cost analysis could be done if both are available
4206 but neither is constant. For now, assume they're equally cheap,
4207 unless one has side effects. If both strings have constant lengths,
4208 use the smaller. */
4210 if (!len1)
4211 len = len2;
4212 else if (!len2)
4213 len = len1;
4214 else if (TREE_SIDE_EFFECTS (len1))
4215 len = len2;
4216 else if (TREE_SIDE_EFFECTS (len2))
4217 len = len1;
4218 else if (TREE_CODE (len1) != INTEGER_CST)
4219 len = len2;
4220 else if (TREE_CODE (len2) != INTEGER_CST)
4221 len = len1;
4222 else if (tree_int_cst_lt (len1, len2))
4223 len = len1;
4224 else
4225 len = len2;
4227 /* If both arguments have side effects, we cannot optimize. */
4228 if (!len || TREE_SIDE_EFFECTS (len))
4229 return NULL_RTX;
4231 /* The actual new length parameter is MIN(len,arg3). */
4232 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4233 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4235 /* If we don't have POINTER_TYPE, call the function. */
4236 if (arg1_align == 0 || arg2_align == 0)
4237 return NULL_RTX;
4239 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4240 arg1 = builtin_save_expr (arg1);
4241 arg2 = builtin_save_expr (arg2);
4242 len = builtin_save_expr (len);
4244 arg1_rtx = get_memory_rtx (arg1, len);
4245 arg2_rtx = get_memory_rtx (arg2, len);
4246 arg3_rtx = expand_normal (len);
4247 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4248 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4249 MIN (arg1_align, arg2_align));
4250 if (result)
4252 /* Return the value in the proper mode for this function. */
4253 mode = TYPE_MODE (TREE_TYPE (exp));
4254 if (GET_MODE (result) == mode)
4255 return result;
4256 if (target == 0)
4257 return convert_to_mode (mode, result, 0);
4258 convert_move (target, result, 0);
4259 return target;
4262 /* Expand the library call ourselves using a stabilized argument
4263 list to avoid re-evaluating the function's arguments twice. */
4264 fndecl = get_callee_fndecl (exp);
4265 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4266 arg1, arg2, len);
4267 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4268 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4269 return expand_call (fn, target, target == const0_rtx);
4271 return NULL_RTX;
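/* Illustrative sketch, not part of GCC: the length passed to cmpstrnsi
   above when one string's length is known.  The helper name is
   hypothetical; CONST_LEN stands for the compile-time strlen.  Reading at
   most strlen + 1 bytes is safe because the comparison cannot continue
   past that string's NUL terminator.  */
static size_t
strncmp_cmp_len_model (size_t const_len, size_t n)
{
  size_t len = const_len + 1;	/* include the NUL */
  return len < n ? len : n;	/* MIN (strlen (s) + 1, n) */
}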
4274 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4275 if that's convenient. */
4277 rtx
4278 expand_builtin_saveregs (void)
4280 rtx val;
4281 rtx_insn *seq;
4283 /* Don't do __builtin_saveregs more than once in a function.
4284 Save the result of the first call and reuse it. */
4285 if (saveregs_value != 0)
4286 return saveregs_value;
4288 /* When this function is called, it means that registers must be
4289 saved on entry to this function. So we migrate the call to the
4290 first insn of this function. */
4292 start_sequence ();
4294 /* Do whatever the machine needs done in this case. */
4295 val = targetm.calls.expand_builtin_saveregs ();
4297 seq = get_insns ();
4298 end_sequence ();
4300 saveregs_value = val;
4302 /* Put the insns after the NOTE that starts the function. If this
4303 is inside a start_sequence, make the outer-level insn chain current, so
4304 the code is placed at the start of the function. */
4305 push_topmost_sequence ();
4306 emit_insn_after (seq, entry_of_function ());
4307 pop_topmost_sequence ();
4309 return val;
4312 /* Expand a call to __builtin_next_arg. */
4314 static rtx
4315 expand_builtin_next_arg (void)
4317 /* Checking arguments is already done in fold_builtin_next_arg
4318 which must be called before this function. */
4319 return expand_binop (ptr_mode, add_optab,
4320 crtl->args.internal_arg_pointer,
4321 crtl->args.arg_offset_rtx,
4322 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4325 /* Make it easier for the backends by protecting the valist argument
4326 from multiple evaluations. */
4328 static tree
4329 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4331 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4333 /* The current way of determining the type of valist is completely
4334 bogus. We should have the information on the va builtin instead. */
4335 if (!vatype)
4336 vatype = targetm.fn_abi_va_list (cfun->decl);
4338 if (TREE_CODE (vatype) == ARRAY_TYPE)
4340 if (TREE_SIDE_EFFECTS (valist))
4341 valist = save_expr (valist);
4343 /* For this case, the backends will be expecting a pointer to
4344 vatype, but it's possible we've actually been given an array
4345 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4346 So fix it. */
4347 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4349 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4350 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4353 else
4355 tree pt = build_pointer_type (vatype);
4357 if (! needs_lvalue)
4359 if (! TREE_SIDE_EFFECTS (valist))
4360 return valist;
4362 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4363 TREE_SIDE_EFFECTS (valist) = 1;
4366 if (TREE_SIDE_EFFECTS (valist))
4367 valist = save_expr (valist);
4368 valist = fold_build2_loc (loc, MEM_REF,
4369 vatype, valist, build_int_cst (pt, 0));
4372 return valist;
4375 /* The "standard" definition of va_list is void*. */
4377 tree
4378 std_build_builtin_va_list (void)
4380 return ptr_type_node;
4383 /* The "standard" abi va_list is va_list_type_node. */
4385 tree
4386 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4388 return va_list_type_node;
4391 /* The "standard" type of va_list is va_list_type_node. */
4393 tree
4394 std_canonical_va_list_type (tree type)
4396 tree wtype, htype;
4398 if (INDIRECT_REF_P (type))
4399 type = TREE_TYPE (type);
4400 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4401 type = TREE_TYPE (type);
4402 wtype = va_list_type_node;
4403 htype = type;
4404 /* Treat structure va_list types. */
4405 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4406 htype = TREE_TYPE (htype);
4407 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4409 /* If va_list is an array type, the argument may have decayed
4410 to a pointer type, e.g. by being passed to another function.
4411 In that case, unwrap both types so that we can compare the
4412 underlying records. */
4413 if (TREE_CODE (htype) == ARRAY_TYPE
4414 || POINTER_TYPE_P (htype))
4416 wtype = TREE_TYPE (wtype);
4417 htype = TREE_TYPE (htype);
4420 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4421 return va_list_type_node;
4423 return NULL_TREE;
4426 /* The "standard" implementation of va_start: just assign `nextarg' to
4427 the variable. */
4429 void
4430 std_expand_builtin_va_start (tree valist, rtx nextarg)
4432 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4433 convert_move (va_r, nextarg, 0);
4435 /* We do not have any valid bounds for the pointer, so
4436 just store zero bounds for it. */
4437 if (chkp_function_instrumented_p (current_function_decl))
4438 chkp_expand_bounds_reset_for_mem (valist,
4439 make_tree (TREE_TYPE (valist),
4440 nextarg));
4443 /* Expand EXP, a call to __builtin_va_start. */
4445 static rtx
4446 expand_builtin_va_start (tree exp)
4448 rtx nextarg;
4449 tree valist;
4450 location_t loc = EXPR_LOCATION (exp);
4452 if (call_expr_nargs (exp) < 2)
4454 error_at (loc, "too few arguments to function %<va_start%>");
4455 return const0_rtx;
4458 if (fold_builtin_next_arg (exp, true))
4459 return const0_rtx;
4461 nextarg = expand_builtin_next_arg ();
4462 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4464 if (targetm.expand_builtin_va_start)
4465 targetm.expand_builtin_va_start (valist, nextarg);
4466 else
4467 std_expand_builtin_va_start (valist, nextarg);
4469 return const0_rtx;
4472 /* Expand EXP, a call to __builtin_va_end. */
4474 static rtx
4475 expand_builtin_va_end (tree exp)
4477 tree valist = CALL_EXPR_ARG (exp, 0);
4479 /* Evaluate for side effects, if needed. I hate macros that don't
4480 do that. */
4481 if (TREE_SIDE_EFFECTS (valist))
4482 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4484 return const0_rtx;
4487 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4488 builtin rather than just as an assignment in stdarg.h because of the
4489 nastiness of array-type va_list types. */
4491 static rtx
4492 expand_builtin_va_copy (tree exp)
4494 tree dst, src, t;
4495 location_t loc = EXPR_LOCATION (exp);
4497 dst = CALL_EXPR_ARG (exp, 0);
4498 src = CALL_EXPR_ARG (exp, 1);
4500 dst = stabilize_va_list_loc (loc, dst, 1);
4501 src = stabilize_va_list_loc (loc, src, 0);
4503 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4505 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4507 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4508 TREE_SIDE_EFFECTS (t) = 1;
4509 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4511 else
4513 rtx dstb, srcb, size;
4515 /* Evaluate to pointers. */
4516 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4517 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4518 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4519 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4521 dstb = convert_memory_address (Pmode, dstb);
4522 srcb = convert_memory_address (Pmode, srcb);
4524 /* "Dereference" to BLKmode memories. */
4525 dstb = gen_rtx_MEM (BLKmode, dstb);
4526 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4527 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4528 srcb = gen_rtx_MEM (BLKmode, srcb);
4529 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4530 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4532 /* Copy. */
4533 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4536 return const0_rtx;
4539 /* Expand a call to one of the builtin functions __builtin_frame_address or
4540 __builtin_return_address. */
4542 static rtx
4543 expand_builtin_frame_address (tree fndecl, tree exp)
4545 /* The argument must be a nonnegative integer constant.
4546 It counts the number of frames to scan up the stack.
4547 The value is either the frame pointer value or the return
4548 address saved in that frame. */
4549 if (call_expr_nargs (exp) == 0)
4550 /* Warning about missing arg was already issued. */
4551 return const0_rtx;
4552 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4554 error ("invalid argument to %qD", fndecl);
4555 return const0_rtx;
4557 else
4559 /* Number of frames to scan up the stack. */
4560 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4562 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4564 /* Some ports cannot access arbitrary stack frames. */
4565 if (tem == NULL)
4567 warning (0, "unsupported argument to %qD", fndecl);
4568 return const0_rtx;
4571 if (count)
4573 /* Warn since no effort is made to ensure that any frame
4574 beyond the current one exists or can be safely reached. */
4575 warning (OPT_Wframe_address, "calling %qD with "
4576 "a nonzero argument is unsafe", fndecl);
4579 /* For __builtin_frame_address, return what we've got. */
4580 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4581 return tem;
4583 if (!REG_P (tem)
4584 && ! CONSTANT_P (tem))
4585 tem = copy_addr_to_reg (tem);
4586 return tem;
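/* Illustrative sketch, not part of GCC: user-level view of the expansion
   above.  A count of 0 is the only argument that is always safe; nonzero
   counts trigger the -Wframe-address warning emitted above.  */
static void *
frame_address_usage_model (void)
{
  return __builtin_frame_address (0);	/* this function's frame */
}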
4590 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4591 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4592 is the same as for allocate_dynamic_stack_space. */
4594 static rtx
4595 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4597 rtx op0;
4598 rtx result;
4599 bool valid_arglist;
4600 unsigned int align;
4601 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4602 == BUILT_IN_ALLOCA_WITH_ALIGN);
4604 valid_arglist
4605 = (alloca_with_align
4606 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4607 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4609 if (!valid_arglist)
4610 return NULL_RTX;
4612 /* Compute the argument. */
4613 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4615 /* Compute the alignment. */
4616 align = (alloca_with_align
4617 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4618 : BIGGEST_ALIGNMENT);
4620 /* Allocate the desired space. */
4621 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4622 result = convert_memory_address (ptr_mode, result);
4624 return result;
4627 /* Expand a call to bswap builtin in EXP.
4628 Return NULL_RTX if a normal call should be emitted rather than expanding the
4629 function in-line. If convenient, the result should be placed in TARGET.
4630 SUBTARGET may be used as the target for computing one of EXP's operands. */
4632 static rtx
4633 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4634 rtx subtarget)
4636 tree arg;
4637 rtx op0;
4639 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4640 return NULL_RTX;
4642 arg = CALL_EXPR_ARG (exp, 0);
4643 op0 = expand_expr (arg,
4644 subtarget && GET_MODE (subtarget) == target_mode
4645 ? subtarget : NULL_RTX,
4646 target_mode, EXPAND_NORMAL);
4647 if (GET_MODE (op0) != target_mode)
4648 op0 = convert_to_mode (target_mode, op0, 1);
4650 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4652 gcc_assert (target);
4654 return convert_to_mode (target_mode, target, 1);
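/* Illustrative sketch, not part of GCC: the operation bswap_optab expands,
   written out for a 32-bit value.  */
static unsigned int
bswap32_model (unsigned int x)
{
  return (x >> 24) | ((x >> 8) & 0x0000ff00u)
	 | ((x << 8) & 0x00ff0000u) | (x << 24);
}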
4657 /* Expand a call to a unary builtin in EXP.
4658 Return NULL_RTX if a normal call should be emitted rather than expanding the
4659 function in-line. If convenient, the result should be placed in TARGET.
4660 SUBTARGET may be used as the target for computing one of EXP's operands. */
4662 static rtx
4663 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4664 rtx subtarget, optab op_optab)
4666 rtx op0;
4668 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4671 /* Compute the argument. */
4672 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4673 (subtarget
4674 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4675 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4676 VOIDmode, EXPAND_NORMAL);
4677 /* Compute op, into TARGET if possible.
4678 Set TARGET to wherever the result comes back. */
4679 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4680 op_optab, op0, target, op_optab != clrsb_optab);
4681 gcc_assert (target);
4683 return convert_to_mode (target_mode, target, 0);
4686 /* Expand a call to __builtin_expect. We just return our argument
4687 since the builtin_expect semantics should already have been applied by
4688 the tree branch prediction pass. */
4690 static rtx
4691 expand_builtin_expect (tree exp, rtx target)
4693 tree arg;
4695 if (call_expr_nargs (exp) < 2)
4696 return const0_rtx;
4697 arg = CALL_EXPR_ARG (exp, 0);
4699 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4700 /* When guessing was done, the hints should be already stripped away. */
4701 gcc_assert (!flag_guess_branch_prob
4702 || optimize == 0 || seen_error ());
4703 return target;
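/* Illustrative sketch, not part of GCC: a typical use of the hint, which
   the tree branch predictor has already consumed by the time this
   expander runs.  */
static int
expect_usage_model (int x)
{
  if (__builtin_expect (x == 0, 1))	/* x == 0 is predicted likely */
    return 1;
  return 0;
}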
4706 /* Expand a call to __builtin_assume_aligned. We just return our first
4707 argument, since the builtin_assume_aligned semantics should already
4708 have been applied by CCP. */
4710 static rtx
4711 expand_builtin_assume_aligned (tree exp, rtx target)
4713 if (call_expr_nargs (exp) < 2)
4714 return const0_rtx;
4715 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4716 EXPAND_NORMAL);
4717 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4718 && (call_expr_nargs (exp) < 3
4719 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4720 return target;
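/* Illustrative sketch, not part of GCC: by expansion time CCP has already
   propagated the alignment fact, so the builtin reduces to its first
   argument.  */
static void *
assume_aligned_usage_model (void *p)
{
  return __builtin_assume_aligned (p, 16);	/* expands to just P here */
}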
4723 void
4724 expand_builtin_trap (void)
4726 if (targetm.have_trap ())
4728 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4729 /* For trap insns when not accumulating outgoing args force
4730 REG_ARGS_SIZE note to prevent crossjumping of calls with
4731 different args sizes. */
4732 if (!ACCUMULATE_OUTGOING_ARGS)
4733 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4735 else
4736 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4737 emit_barrier ();
4740 /* Expand a call to __builtin_unreachable. We do nothing except emit
4741 a barrier saying that control flow will not pass here.
4743 It is the responsibility of the program being compiled to ensure
4744 that control flow never reaches __builtin_unreachable. */
4745 static void
4746 expand_builtin_unreachable (void)
4748 emit_barrier ();
4751 /* Expand EXP, a call to fabs, fabsf or fabsl.
4752 Return NULL_RTX if a normal call should be emitted rather than expanding
4753 the function inline. If convenient, the result should be placed
4754 in TARGET. SUBTARGET may be used as the target for computing
4755 the operand. */
4757 static rtx
4758 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4760 machine_mode mode;
4761 tree arg;
4762 rtx op0;
4764 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4765 return NULL_RTX;
4767 arg = CALL_EXPR_ARG (exp, 0);
4768 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4769 mode = TYPE_MODE (TREE_TYPE (arg));
4770 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4771 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4774 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4775 Return NULL if a normal call should be emitted rather than expanding the
4776 function inline. If convenient, the result should be placed in TARGET.
4777 SUBTARGET may be used as the target for computing the operand. */
4779 static rtx
4780 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4782 rtx op0, op1;
4783 tree arg;
4785 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4786 return NULL_RTX;
4788 arg = CALL_EXPR_ARG (exp, 0);
4789 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4791 arg = CALL_EXPR_ARG (exp, 1);
4792 op1 = expand_normal (arg);
4794 return expand_copysign (op0, op1, target);
4797 /* Expand a call to __builtin___clear_cache. */
4799 static rtx
4800 expand_builtin___clear_cache (tree exp)
4802 if (!targetm.code_for_clear_cache)
4804 #ifdef CLEAR_INSN_CACHE
4805 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4806 does something. Just do the default expansion to a call to
4807 __clear_cache(). */
4808 return NULL_RTX;
4809 #else
4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does nothing. There is no need to call it. Do nothing. */
4812 return const0_rtx;
4813 #endif /* CLEAR_INSN_CACHE */
4816 /* We have a "clear_cache" insn, and it will handle everything. */
4817 tree begin, end;
4818 rtx begin_rtx, end_rtx;
4820 /* We must not expand to a library call. If we did, any
4821 fallback library function in libgcc that might contain a call to
4822 __builtin___clear_cache() would recurse infinitely. */
4823 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4825 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4826 return const0_rtx;
4829 if (targetm.have_clear_cache ())
4831 struct expand_operand ops[2];
4833 begin = CALL_EXPR_ARG (exp, 0);
4834 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4836 end = CALL_EXPR_ARG (exp, 1);
4837 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4839 create_address_operand (&ops[0], begin_rtx);
4840 create_address_operand (&ops[1], end_rtx);
4841 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4842 return const0_rtx;
4844 return const0_rtx;
4847 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4849 static rtx
4850 round_trampoline_addr (rtx tramp)
4852 rtx temp, addend, mask;
4854 /* If we don't need too much alignment, we'll have been guaranteed
4855 proper alignment by get_trampoline_type. */
4856 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4857 return tramp;
4859 /* Round address up to desired boundary. */
4860 temp = gen_reg_rtx (Pmode);
4861 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4862 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4864 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4865 temp, 0, OPTAB_LIB_WIDEN);
4866 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4867 temp, 0, OPTAB_LIB_WIDEN);
4869 return tramp;
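/* Illustrative sketch, not part of GCC: the PLUS/AND sequence emitted
   above, in plain integer arithmetic.  A is the alignment in bytes and
   must be a power of two.  */
static unsigned long
round_up_model (unsigned long addr, unsigned long a)
{
  return (addr + (a - 1)) & ~(a - 1);	/* matches the addend and mask */
}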
4872 static rtx
4873 expand_builtin_init_trampoline (tree exp, bool onstack)
4875 tree t_tramp, t_func, t_chain;
4876 rtx m_tramp, r_tramp, r_chain, tmp;
4878 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4879 POINTER_TYPE, VOID_TYPE))
4880 return NULL_RTX;
4882 t_tramp = CALL_EXPR_ARG (exp, 0);
4883 t_func = CALL_EXPR_ARG (exp, 1);
4884 t_chain = CALL_EXPR_ARG (exp, 2);
4886 r_tramp = expand_normal (t_tramp);
4887 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4888 MEM_NOTRAP_P (m_tramp) = 1;
4890 /* If ONSTACK, the TRAMP argument should be the address of a field
4891 within the local function's FRAME decl. Either way, let's see if
4892 we can fill in the MEM_ATTRs for this memory. */
4893 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4894 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4896 /* Creator of a heap trampoline is responsible for making sure the
4897 address is aligned to at least STACK_BOUNDARY. Normally malloc
4898 will ensure this anyhow. */
4899 tmp = round_trampoline_addr (r_tramp);
4900 if (tmp != r_tramp)
4902 m_tramp = change_address (m_tramp, BLKmode, tmp);
4903 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4904 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4907 /* The FUNC argument should be the address of the nested function.
4908 Extract the actual function decl to pass to the hook. */
4909 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4910 t_func = TREE_OPERAND (t_func, 0);
4911 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4913 r_chain = expand_normal (t_chain);
4915 /* Generate insns to initialize the trampoline. */
4916 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4918 if (onstack)
4920 trampolines_created = 1;
4922 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4923 "trampoline generated for nested function %qD", t_func);
4926 return const0_rtx;
4929 static rtx
4930 expand_builtin_adjust_trampoline (tree exp)
4932 rtx tramp;
4934 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4935 return NULL_RTX;
4937 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4938 tramp = round_trampoline_addr (tramp);
4939 if (targetm.calls.trampoline_adjust_address)
4940 tramp = targetm.calls.trampoline_adjust_address (tramp);
4942 return tramp;
4945 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4946 function. The function first checks whether the back end provides
4947 an insn to implement signbit for the respective mode. If not, it
4948 checks whether the floating point format of the value is such that
4949 the sign bit can be extracted. If that is not the case, error out.
4950 EXP is the expression that is a call to the builtin function; if
4951 convenient, the result should be placed in TARGET. */
4952 static rtx
4953 expand_builtin_signbit (tree exp, rtx target)
4955 const struct real_format *fmt;
4956 machine_mode fmode, imode, rmode;
4957 tree arg;
4958 int word, bitpos;
4959 enum insn_code icode;
4960 rtx temp;
4961 location_t loc = EXPR_LOCATION (exp);
4963 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4964 return NULL_RTX;
4966 arg = CALL_EXPR_ARG (exp, 0);
4967 fmode = TYPE_MODE (TREE_TYPE (arg));
4968 rmode = TYPE_MODE (TREE_TYPE (exp));
4969 fmt = REAL_MODE_FORMAT (fmode);
4971 arg = builtin_save_expr (arg);
4973 /* Expand the argument yielding a RTX expression. */
4974 temp = expand_normal (arg);
4976 /* Check if the back end provides an insn that handles signbit for the
4977 argument's mode. */
4978 icode = optab_handler (signbit_optab, fmode);
4979 if (icode != CODE_FOR_nothing)
4981 rtx_insn *last = get_last_insn ();
4982 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4983 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4984 return target;
4985 delete_insns_since (last);
4988 /* For floating point formats without a sign bit, implement signbit
4989 as "ARG < 0.0". */
4990 bitpos = fmt->signbit_ro;
4991 if (bitpos < 0)
4993 /* But we can't do this if the format supports signed zero. */
4994 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4996 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4997 build_real (TREE_TYPE (arg), dconst0));
4998 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5001 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5003 imode = int_mode_for_mode (fmode);
5004 gcc_assert (imode != BLKmode);
5005 temp = gen_lowpart (imode, temp);
5007 else
5009 imode = word_mode;
5010 /* Handle targets with different FP word orders. */
5011 if (FLOAT_WORDS_BIG_ENDIAN)
5012 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5013 else
5014 word = bitpos / BITS_PER_WORD;
5015 temp = operand_subword_force (temp, word, fmode);
5016 bitpos = bitpos % BITS_PER_WORD;
5019 /* Force the intermediate word_mode (or narrower) result into a
5020 register. This avoids attempting to create paradoxical SUBREGs
5021 of floating point modes below. */
5022 temp = force_reg (imode, temp);
5024 /* If the bitpos is within the "result mode" lowpart, the operation
5025 can be implemented with a single bitwise AND. Otherwise, we need
5026 a right shift and an AND. */
5028 if (bitpos < GET_MODE_BITSIZE (rmode))
5030 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5032 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp,
5035 immed_wide_int_const (mask, rmode),
5036 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5038 else
5040 /* Perform a logical right shift to place the signbit in the least
5041 significant bit, then truncate the result to the desired mode
5042 and mask just this bit. */
5043 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5044 temp = gen_lowpart (rmode, temp);
5045 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5046 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5049 return temp;
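/* Illustrative sketch, not part of GCC: the shift-and-mask fallback above,
   for IEEE single precision viewed as a 32-bit word (sign bit 31).  */
static int
signbit_model (unsigned int float_bits)
{
  return (float_bits >> 31) & 1;
}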
5052 /* Expand fork or exec calls. TARGET is the desired target of the
5053 call. EXP is the call. FN is the
5054 identifier of the actual function. IGNORE is nonzero if the
5055 value is to be ignored. */
5057 static rtx
5058 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5060 tree id, decl;
5061 tree call;
5063 /* If we are not profiling, just call the function. */
5064 if (!profile_arc_flag)
5065 return NULL_RTX;
5067 /* Otherwise call the wrapper. This should be equivalent for the rest of
5068 the compiler, so the code does not diverge, and the wrapper may run the
5069 code necessary for keeping the profiling sane. */
5071 switch (DECL_FUNCTION_CODE (fn))
5073 case BUILT_IN_FORK:
5074 id = get_identifier ("__gcov_fork");
5075 break;
5077 case BUILT_IN_EXECL:
5078 id = get_identifier ("__gcov_execl");
5079 break;
5081 case BUILT_IN_EXECV:
5082 id = get_identifier ("__gcov_execv");
5083 break;
5085 case BUILT_IN_EXECLP:
5086 id = get_identifier ("__gcov_execlp");
5087 break;
5089 case BUILT_IN_EXECLE:
5090 id = get_identifier ("__gcov_execle");
5091 break;
5093 case BUILT_IN_EXECVP:
5094 id = get_identifier ("__gcov_execvp");
5095 break;
5097 case BUILT_IN_EXECVE:
5098 id = get_identifier ("__gcov_execve");
5099 break;
5101 default:
5102 gcc_unreachable ();
5105 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5106 FUNCTION_DECL, id, TREE_TYPE (fn));
5107 DECL_EXTERNAL (decl) = 1;
5108 TREE_PUBLIC (decl) = 1;
5109 DECL_ARTIFICIAL (decl) = 1;
5110 TREE_NOTHROW (decl) = 1;
5111 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5112 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5113 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5114 return expand_call (call, target, ignore);
5119 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5120 the pointer in these functions is void*, the tree optimizers may remove
5121 casts. The mode computed in expand_builtin isn't reliable either, due
5122 to __sync_bool_compare_and_swap.
5124 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5125 group of builtins. This gives us log2 of the mode size. */
5127 static inline machine_mode
5128 get_builtin_sync_mode (int fcode_diff)
5130 /* The size is not negotiable, so ask not to get BLKmode in return
5131 if the target indicates that a smaller size would be better. */
5132 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
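/* Illustrative sketch, not part of GCC: FCODE_DIFF is log2 of the access
   size in bytes, so the _1/_2/_4/_8/_16 variants select these bit widths
   (assuming BITS_PER_UNIT == 8).  */
static int
sync_mode_bits_model (int fcode_diff)
{
  return 8 << fcode_diff;	/* 8, 16, 32, 64, 128 for diffs 0..4 */
}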
5135 /* Expand the memory expression LOC and return the appropriate memory operand
5136 for the builtin_sync operations. */
5138 static rtx
5139 get_builtin_sync_mem (tree loc, machine_mode mode)
5141 rtx addr, mem;
5143 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5144 addr = convert_memory_address (Pmode, addr);
5146 /* Note that we explicitly do not want any alias information for this
5147 memory, so that we kill all other live memories. Otherwise we don't
5148 satisfy the full barrier semantics of the intrinsic. */
5149 mem = validize_mem (gen_rtx_MEM (mode, addr));
5151 /* The alignment needs to be at least that of the mode. */
5152 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5153 get_pointer_alignment (loc)));
5154 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5155 MEM_VOLATILE_P (mem) = 1;
5157 return mem;
5160 /* Make sure an argument is in the right mode.
5161 EXP is the tree argument.
5162 MODE is the mode it should be in. */
5164 static rtx
5165 expand_expr_force_mode (tree exp, machine_mode mode)
5167 rtx val;
5168 machine_mode old_mode;
5170 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5171 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5172 of CONST_INTs, where we know the old_mode only from the call argument. */
5174 old_mode = GET_MODE (val);
5175 if (old_mode == VOIDmode)
5176 old_mode = TYPE_MODE (TREE_TYPE (exp));
5177 val = convert_modes (mode, old_mode, val, 1);
5178 return val;
5182 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5183 EXP is the CALL_EXPR. CODE is the rtx code
5184 that corresponds to the arithmetic or logical operation from the name;
5185 an exception here is that NOT actually means NAND. TARGET is an optional
5186 place for us to store the results; AFTER is true if this is the
5187 fetch_and_xxx form. */
5189 static rtx
5190 expand_builtin_sync_operation (machine_mode mode, tree exp,
5191 enum rtx_code code, bool after,
5192 rtx target)
5194 rtx val, mem;
5195 location_t loc = EXPR_LOCATION (exp);
5197 if (code == NOT && warn_sync_nand)
5199 tree fndecl = get_callee_fndecl (exp);
5200 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5202 static bool warned_f_a_n, warned_n_a_f;
5204 switch (fcode)
5206 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5207 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5208 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5209 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5210 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5211 if (warned_f_a_n)
5212 break;
5214 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5215 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5216 warned_f_a_n = true;
5217 break;
5219 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5220 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5221 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5222 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5223 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5224 if (warned_n_a_f)
5225 break;
5227 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5228 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5229 warned_n_a_f = true;
5230 break;
5232 default:
5233 gcc_unreachable ();
5237 /* Expand the operands. */
5238 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5239 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5241 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5242 after);
5245 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5246 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5247 true if this is the boolean form. TARGET is a place for us to store the
5248 results; this is NOT optional if IS_BOOL is true. */
5250 static rtx
5251 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5252 bool is_bool, rtx target)
5254 rtx old_val, new_val, mem;
5255 rtx *pbool, *poval;
5257 /* Expand the operands. */
5258 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5259 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5260 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5262 pbool = poval = NULL;
5263 if (target != const0_rtx)
5265 if (is_bool)
5266 pbool = &target;
5267 else
5268 poval = &target;
5270 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5271 false, MEMMODEL_SYNC_SEQ_CST,
5272 MEMMODEL_SYNC_SEQ_CST))
5273 return NULL_RTX;
5275 return target;
5278 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5279 general form is actually an atomic exchange, and some targets only
5280 support a reduced form with the second argument being a constant 1.
5281 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5282 the results. */
5284 static rtx
5285 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5286 rtx target)
5288 rtx val, mem;
5290 /* Expand the operands. */
5291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5292 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5294 return expand_sync_lock_test_and_set (target, mem, val);
5297 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5299 static void
5300 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5302 rtx mem;
5304 /* Expand the operands. */
5305 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5307 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5310 /* Given an integer representing an ``enum memmodel'', verify its
5311 correctness and return the memory model enum. */
5313 static enum memmodel
5314 get_memmodel (tree exp)
5316 rtx op;
5317 unsigned HOST_WIDE_INT val;
5319 /* If the parameter is not a constant, it's a run time value so we'll just
5320 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5321 if (TREE_CODE (exp) != INTEGER_CST)
5322 return MEMMODEL_SEQ_CST;
5324 op = expand_normal (exp);
5326 val = INTVAL (op);
5327 if (targetm.memmodel_check)
5328 val = targetm.memmodel_check (val);
5329 else if (val & ~MEMMODEL_MASK)
5331 warning (OPT_Winvalid_memory_model,
5332 "Unknown architecture specifier in memory model to builtin.");
5333 return MEMMODEL_SEQ_CST;
5336 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5337 if (memmodel_base (val) >= MEMMODEL_LAST)
5339 warning (OPT_Winvalid_memory_model,
5340 "invalid memory model argument to builtin");
5341 return MEMMODEL_SEQ_CST;
5344 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5345 be conservative and promote consume to acquire. */
5346 if (val == MEMMODEL_CONSUME)
5347 val = MEMMODEL_ACQUIRE;
5349 return (enum memmodel) val;
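/* For example, a user call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME)

   arrives here with an INTEGER_CST model argument and, per the PR 59448
   workaround above, is promoted to MEMMODEL_ACQUIRE, while a model
   argument that is not a compile-time constant is conservatively
   treated as MEMMODEL_SEQ_CST.  */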
5352 /* Expand the __atomic_exchange intrinsic:
5353 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5354 EXP is the CALL_EXPR.
5355 TARGET is an optional place for us to store the results. */
5357 static rtx
5358 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5360 rtx val, mem;
5361 enum memmodel model;
5363 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5365 if (!flag_inline_atomics)
5366 return NULL_RTX;
5368 /* Expand the operands. */
5369 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5370 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5372 return expand_atomic_exchange (target, mem, val, model);
5375 /* Expand the __atomic_compare_exchange intrinsic:
5376 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5377 TYPE desired, BOOL weak,
5378 enum memmodel success,
5379 enum memmodel failure)
5380 EXP is the CALL_EXPR.
5381 TARGET is an optional place for us to store the results. */
5383 static rtx
5384 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5385 rtx target)
5387 rtx expect, desired, mem, oldval;
5388 rtx_code_label *label;
5389 enum memmodel success, failure;
5390 tree weak;
5391 bool is_weak;
5393 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5394 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5396 if (failure > success)
5398 warning (OPT_Winvalid_memory_model,
5399 "failure memory model cannot be stronger than success memory "
5400 "model for %<__atomic_compare_exchange%>");
5401 success = MEMMODEL_SEQ_CST;
5404 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5406 warning (OPT_Winvalid_memory_model,
5407 "invalid failure memory model for "
5408 "%<__atomic_compare_exchange%>");
5409 failure = MEMMODEL_SEQ_CST;
5410 success = MEMMODEL_SEQ_CST;
5414 if (!flag_inline_atomics)
5415 return NULL_RTX;
5417 /* Expand the operands. */
5418 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5420 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5421 expect = convert_memory_address (Pmode, expect);
5422 expect = gen_rtx_MEM (mode, expect);
5423 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5425 weak = CALL_EXPR_ARG (exp, 3);
5426 is_weak = false;
5427 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5428 is_weak = true;
5430 if (target == const0_rtx)
5431 target = NULL;
5433 /* Lest the rtl backend create a race condition with an improper store
5434 to memory, always create a new pseudo for OLDVAL. */
5435 oldval = NULL;
5437 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5438 is_weak, success, failure))
5439 return NULL_RTX;
5441 /* Conditionally store back to EXPECT, lest we create a race condition
5442 with an improper store to memory. */
5443 /* ??? With a rearrangement of atomics at the gimple level, we could handle
5444 the normal case where EXPECT is totally private, i.e. a register, at
5445 which point the store could be unconditional. */
5446 label = gen_label_rtx ();
5447 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5448 GET_MODE (target), 1, label);
5449 emit_move_insn (expect, oldval);
5450 emit_label (label);
5452 return target;
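/* The conditional store-back above implements the user-level contract
   that *EXPECT is only overwritten when the exchange fails:

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&x, &expected, 1, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   If OK is false, EXPECTED afterwards holds the value that was actually
   observed in X.  */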
5455 /* Expand the __atomic_load intrinsic:
5456 TYPE __atomic_load (TYPE *object, enum memmodel)
5457 EXP is the CALL_EXPR.
5458 TARGET is an optional place for us to store the results. */
5460 static rtx
5461 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5463 rtx mem;
5464 enum memmodel model;
5466 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5467 if (is_mm_release (model) || is_mm_acq_rel (model))
5469 warning (OPT_Winvalid_memory_model,
5470 "invalid memory model for %<__atomic_load%>");
5471 model = MEMMODEL_SEQ_CST;
5474 if (!flag_inline_atomics)
5475 return NULL_RTX;
5477 /* Expand the operand. */
5478 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5480 return expand_atomic_load (target, mem, model);
5484 /* Expand the __atomic_store intrinsic:
5485 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5486 EXP is the CALL_EXPR.
5487 TARGET is an optional place for us to store the results. */
5489 static rtx
5490 expand_builtin_atomic_store (machine_mode mode, tree exp)
5492 rtx mem, val;
5493 enum memmodel model;
5495 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5496 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5497 || is_mm_release (model)))
5499 warning (OPT_Winvalid_memory_model,
5500 "invalid memory model for %<__atomic_store%>");
5501 model = MEMMODEL_SEQ_CST;
5504 if (!flag_inline_atomics)
5505 return NULL_RTX;
5507 /* Expand the operands. */
5508 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5509 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5511 return expand_atomic_store (mem, val, model, false);
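/* For example,

     __atomic_store_n (&x, v, __ATOMIC_ACQUIRE);

   is diagnosed above, since C11 only permits relaxed, release or seq_cst
   ordering for stores, and is then downgraded to sequential consistency
   rather than rejected outright.  */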
5514 /* Expand the __atomic_fetch_XXX intrinsic:
5515 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5516 EXP is the CALL_EXPR.
5517 TARGET is an optional place for us to store the results.
5518 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT.
5519 FETCH_AFTER is true if returning the result of the operation.
5520 FETCH_AFTER is false if returning the value before the operation.
5521 IGNORE is true if the result is not used.
5522 EXT_CALL is the correct builtin for an external call if this cannot be
5523 resolved to an instruction sequence. */
5525 static rtx
5526 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5527 enum rtx_code code, bool fetch_after,
5528 bool ignore, enum built_in_function ext_call)
5530 rtx val, mem, ret;
5531 enum memmodel model;
5532 tree fndecl;
5533 tree addr;
5535 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5537 /* Expand the operands. */
5538 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5539 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5541 /* Only try generating instructions if inlining is turned on. */
5542 if (flag_inline_atomics)
5544 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5545 if (ret)
5546 return ret;
5549 /* Return if a different routine isn't needed for the library call. */
5550 if (ext_call == BUILT_IN_NONE)
5551 return NULL_RTX;
5553 /* Change the call to the specified function. */
5554 fndecl = get_callee_fndecl (exp);
5555 addr = CALL_EXPR_FN (exp);
5556 STRIP_NOPS (addr);
5558 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5559 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5561 /* Expand the call here so we can emit trailing code. */
5562 ret = expand_call (exp, target, ignore);
5564 /* Replace the original function just in case it matters. */
5565 TREE_OPERAND (addr, 0) = fndecl;
5567 /* Then issue the arithmetic correction to return the right result. */
5568 if (!ignore)
5570 if (code == NOT)
5572 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5573 OPTAB_LIB_WIDEN);
5574 ret = expand_simple_unop (mode, NOT, ret, target, true);
5576 else
5577 ret = expand_simple_binop (mode, code, ret, val, target, true,
5578 OPTAB_LIB_WIDEN);
5580 return ret;
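/* The trailing correction above recovers an AFTER result from a library
   routine that returns the BEFORE value.  For NAND the identity used is

     nand_and_fetch (p, v) == ~(fetch_and_nand (p, v) & v)

   while for the other operations the rtx CODE is simply re-applied to
   the returned value and VAL.  */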
5583 /* Expand an atomic clear operation.
5584 void __atomic_clear (BOOL *obj, enum memmodel)
5585 EXP is the call expression. */
5587 static rtx
5588 expand_builtin_atomic_clear (tree exp)
5590 machine_mode mode;
5591 rtx mem, ret;
5592 enum memmodel model;
5594 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5595 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5596 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5598 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5600 warning (OPT_Winvalid_memory_model,
5601 "invalid memory model for %<__atomic_store%>");
5602 model = MEMMODEL_SEQ_CST;
5605 /* Try issuing an atomic store, allowing fallback to a __sync_lock_release
5606 pattern. The only way this can fail is if the bool type is larger than
5607 a word size, in which case we fall back to a plain store. Unlikely, but
5608 handle it anyway for completeness. Assume a single threaded model since
5609 there is no atomic support in this case, and no barriers are required. */
5610 ret = expand_atomic_store (mem, const0_rtx, model, true);
5611 if (!ret)
5612 emit_move_insn (mem, const0_rtx);
5613 return const0_rtx;
5616 /* Expand an atomic test_and_set operation.
5617 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5618 EXP is the call expression. */
5620 static rtx
5621 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5623 rtx mem;
5624 enum memmodel model;
5625 machine_mode mode;
5627 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5628 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5629 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5631 return expand_atomic_test_and_set (target, mem, model);
5635 /* Return true if the object of size ARG0, optionally pointed to by ARG1,
5636 is always lock free on this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5638 static tree
5639 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5641 int size;
5642 machine_mode mode;
5643 unsigned int mode_align, type_align;
5645 if (TREE_CODE (arg0) != INTEGER_CST)
5646 return NULL_TREE;
5648 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5649 mode = mode_for_size (size, MODE_INT, 0);
5650 mode_align = GET_MODE_ALIGNMENT (mode);
5652 if (TREE_CODE (arg1) == INTEGER_CST)
5654 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5656 /* Either this argument is null, or it's a fake pointer encoding
5657 the alignment of the object. */
5658 val = val & -val;
5659 val *= BITS_PER_UNIT;
5661 if (val == 0 || mode_align < val)
5662 type_align = mode_align;
5663 else
5664 type_align = val;
5666 else
5668 tree ttype = TREE_TYPE (arg1);
5670 /* This function is usually invoked and folded immediately by the front
5671 end before anything else has a chance to look at it. The pointer
5672 parameter at this point is usually cast to a void *, so check for that
5673 and look past the cast. */
5674 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5675 && VOID_TYPE_P (TREE_TYPE (ttype)))
5676 arg1 = TREE_OPERAND (arg1, 0);
5678 ttype = TREE_TYPE (arg1);
5679 gcc_assert (POINTER_TYPE_P (ttype));
5681 /* Get the underlying type of the object. */
5682 ttype = TREE_TYPE (ttype);
5683 type_align = TYPE_ALIGN (ttype);
5686 /* If the object has smaller alignment, the lock free routines cannot
5687 be used. */
5688 if (type_align < mode_align)
5689 return boolean_false_node;
5691 /* Check if a compare_and_swap pattern exists for the mode which represents
5692 the required size. The pattern is not allowed to fail, so the existence
5693 of the pattern indicates support is present. */
5694 if (can_compare_and_swap_p (mode, true))
5695 return boolean_true_node;
5696 else
5697 return boolean_false_node;
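/* For example, on a typical 32- or 64-bit target

     __atomic_always_lock_free (sizeof (int), 0)

   folds to true here: with a null ARG1 the natural mode alignment is
   assumed, and the fold then succeeds exactly when a compare-and-swap
   pattern exists for the 4-byte integer mode.  */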
5700 /* Expand a call EXP to __atomic_always_lock_free and return const1_rtx if
5701 the object it describes will always generate lock free instructions,
5702 const0_rtx otherwise. The first argument is the size of the object, and
5703 the second is a pointer to the object itself. If NULL is passed for the
5704 object, the result is based on typical alignment for an object of the
5705 specified size. */
5707 static rtx
5708 expand_builtin_atomic_always_lock_free (tree exp)
5710 tree size;
5711 tree arg0 = CALL_EXPR_ARG (exp, 0);
5712 tree arg1 = CALL_EXPR_ARG (exp, 1);
5714 if (TREE_CODE (arg0) != INTEGER_CST)
5716 error ("non-constant argument 1 to __atomic_always_lock_free");
5717 return const0_rtx;
5720 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5721 if (size == boolean_true_node)
5722 return const1_rtx;
5723 return const0_rtx;
5726 /* Return boolean_true_node if it can be determined that the object ARG1 of
5727 size ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5729 static tree
5730 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5732 if (!flag_inline_atomics)
5733 return NULL_TREE;
5735 /* If it isn't always lock free, don't generate a result. */
5736 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5737 return boolean_true_node;
5739 return NULL_TREE;
5742 /* Expand a call EXP to __atomic_is_lock_free and return const1_rtx if it
5743 can be proven at compile time that the object it describes is lock free,
5744 NULL_RTX otherwise (in which case a library call resolves the answer at
5745 run time). The first argument is the size of the object, and the second
5746 is a pointer to the object itself; if NULL is passed for the object, the
5747 result is based on typical alignment for an object of the specified size. */
5749 static rtx
5750 expand_builtin_atomic_is_lock_free (tree exp)
5752 tree size;
5753 tree arg0 = CALL_EXPR_ARG (exp, 0);
5754 tree arg1 = CALL_EXPR_ARG (exp, 1);
5756 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5758 error ("non-integer argument 1 to __atomic_is_lock_free");
5759 return NULL_RTX;
5762 if (!flag_inline_atomics)
5763 return NULL_RTX;
5765 /* If the value is known at compile time, return the RTX for it. */
5766 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5767 if (size == boolean_true_node)
5768 return const1_rtx;
5770 return NULL_RTX;
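/* Unlike __atomic_always_lock_free, a NULL_RTX result here is not a
   "false" answer: the caller falls through to a library call, so

     __atomic_is_lock_free (sizeof (T), p)

   becomes a run-time query (typically answered by libatomic) whenever
   the result is not provable at compile time.  */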
5773 /* Expand the __atomic_thread_fence intrinsic:
5774 void __atomic_thread_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5777 static void
5778 expand_builtin_atomic_thread_fence (tree exp)
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_thread_fence (model);
5784 /* Expand the __atomic_signal_fence intrinsic:
5785 void __atomic_signal_fence (enum memmodel)
5786 EXP is the CALL_EXPR. */
5788 static void
5789 expand_builtin_atomic_signal_fence (tree exp)
5791 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5792 expand_mem_signal_fence (model);
5795 /* Expand the __sync_synchronize intrinsic. */
5797 static void
5798 expand_builtin_sync_synchronize (void)
5800 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5803 static rtx
5804 expand_builtin_thread_pointer (tree exp, rtx target)
5806 enum insn_code icode;
5807 if (!validate_arglist (exp, VOID_TYPE))
5808 return const0_rtx;
5809 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5810 if (icode != CODE_FOR_nothing)
5812 struct expand_operand op;
5813 /* If the target is not suitable, create a new target. */
5814 if (target == NULL_RTX
5815 || !REG_P (target)
5816 || GET_MODE (target) != Pmode)
5817 target = gen_reg_rtx (Pmode);
5818 create_output_operand (&op, target, Pmode);
5819 expand_insn (icode, 1, &op);
5820 return target;
5822 error ("__builtin_thread_pointer is not supported on this target");
5823 return const0_rtx;
5826 static void
5827 expand_builtin_set_thread_pointer (tree exp)
5829 enum insn_code icode;
5830 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5831 return;
5832 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5833 if (icode != CODE_FOR_nothing)
5835 struct expand_operand op;
5836 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5837 Pmode, EXPAND_NORMAL);
5838 create_input_operand (&op, val, Pmode);
5839 expand_insn (icode, 1, &op);
5840 return;
5842 error ("__builtin_set_thread_pointer is not supported on this target");
5846 /* Emit code to restore the current value of stack. */
5848 static void
5849 expand_stack_restore (tree var)
5851 rtx_insn *prev;
5852 rtx sa = expand_normal (var);
5854 sa = convert_memory_address (Pmode, sa);
5856 prev = get_last_insn ();
5857 emit_stack_restore (SAVE_BLOCK, sa);
5859 record_new_stack_level ();
5861 fixup_args_size_notes (prev, get_last_insn (), 0);
5864 /* Emit code to save the current value of stack. */
5866 static rtx
5867 expand_stack_save (void)
5869 rtx ret = NULL_RTX;
5871 emit_stack_save (SAVE_BLOCK, &ret);
5872 return ret;
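/* These two helpers implement the save/restore pairs that bracket
   variable-sized allocations.  For a block such as

     { int n = f (); char buf[n]; g (buf); }

   a stack save is emitted on entry and the matching restore on exit, so
   the space occupied by BUF is reclaimed when the block ends.  */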
5876 /* Expand an expression EXP that calls a built-in function,
5877 with result going to TARGET if that's convenient
5878 (and in mode MODE if that's convenient).
5879 SUBTARGET may be used as the target for computing one of EXP's operands.
5880 IGNORE is nonzero if the value is to be ignored. */
5882 rtx
5883 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5884 int ignore)
5886 tree fndecl = get_callee_fndecl (exp);
5887 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5888 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5889 int flags;
5891 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5892 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5894 /* When ASan is enabled, we don't want to expand some memory/string
5895 builtins and rely on libsanitizer's hooks. This allows us to avoid
5896 redundant checks and be sure that a possible overflow will be detected
5897 by ASan. */
5899 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5900 return expand_call (exp, target, ignore);
5902 /* When not optimizing, generate calls to library functions for a certain
5903 set of builtins. */
5904 if (!optimize
5905 && !called_as_built_in (fndecl)
5906 && fcode != BUILT_IN_FORK
5907 && fcode != BUILT_IN_EXECL
5908 && fcode != BUILT_IN_EXECV
5909 && fcode != BUILT_IN_EXECLP
5910 && fcode != BUILT_IN_EXECLE
5911 && fcode != BUILT_IN_EXECVP
5912 && fcode != BUILT_IN_EXECVE
5913 && fcode != BUILT_IN_ALLOCA
5914 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5915 && fcode != BUILT_IN_FREE
5916 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5917 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5918 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5919 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5920 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5921 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5922 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5923 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5924 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5925 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5926 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5927 && fcode != BUILT_IN_CHKP_BNDRET)
5928 return expand_call (exp, target, ignore);
5930 /* The built-in function expanders test for target == const0_rtx
5931 to determine whether the function's result will be ignored. */
5932 if (ignore)
5933 target = const0_rtx;
5935 /* If the result of a pure or const built-in function is ignored, and
5936 none of its arguments are volatile, we can avoid expanding the
5937 built-in call and just evaluate the arguments for side-effects. */
5938 if (target == const0_rtx
5939 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5940 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5942 bool volatilep = false;
5943 tree arg;
5944 call_expr_arg_iterator iter;
5946 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5947 if (TREE_THIS_VOLATILE (arg))
5949 volatilep = true;
5950 break;
5953 if (! volatilep)
5955 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5956 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5957 return const0_rtx;
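/* For example, in

     (void) __builtin_popcount (i++);

   the const call itself is not expanded, but I++ is still evaluated
   here for its side effect.  */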
5961 /* expand_builtin_with_bounds is supposed to be used for
5962 instrumented builtin calls. */
5963 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5965 switch (fcode)
5967 CASE_FLT_FN (BUILT_IN_FABS):
5968 case BUILT_IN_FABSD32:
5969 case BUILT_IN_FABSD64:
5970 case BUILT_IN_FABSD128:
5971 target = expand_builtin_fabs (exp, target, subtarget);
5972 if (target)
5973 return target;
5974 break;
5976 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5977 target = expand_builtin_copysign (exp, target, subtarget);
5978 if (target)
5979 return target;
5980 break;
5982 /* Just do a normal library call if we were unable to fold
5983 the values. */
5984 CASE_FLT_FN (BUILT_IN_CABS):
5985 break;
5987 CASE_FLT_FN (BUILT_IN_EXP):
5988 CASE_FLT_FN (BUILT_IN_EXP10):
5989 CASE_FLT_FN (BUILT_IN_POW10):
5990 CASE_FLT_FN (BUILT_IN_EXP2):
5991 CASE_FLT_FN (BUILT_IN_EXPM1):
5992 CASE_FLT_FN (BUILT_IN_LOGB):
5993 CASE_FLT_FN (BUILT_IN_LOG):
5994 CASE_FLT_FN (BUILT_IN_LOG10):
5995 CASE_FLT_FN (BUILT_IN_LOG2):
5996 CASE_FLT_FN (BUILT_IN_LOG1P):
5997 CASE_FLT_FN (BUILT_IN_TAN):
5998 CASE_FLT_FN (BUILT_IN_ASIN):
5999 CASE_FLT_FN (BUILT_IN_ACOS):
6000 CASE_FLT_FN (BUILT_IN_ATAN):
6001 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
6002 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6003 because of possible accuracy problems. */
6004 if (! flag_unsafe_math_optimizations)
6005 break;
6006 CASE_FLT_FN (BUILT_IN_SQRT):
6007 CASE_FLT_FN (BUILT_IN_FLOOR):
6008 CASE_FLT_FN (BUILT_IN_CEIL):
6009 CASE_FLT_FN (BUILT_IN_TRUNC):
6010 CASE_FLT_FN (BUILT_IN_ROUND):
6011 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6012 CASE_FLT_FN (BUILT_IN_RINT):
6013 target = expand_builtin_mathfn (exp, target, subtarget);
6014 if (target)
6015 return target;
6016 break;
6018 CASE_FLT_FN (BUILT_IN_FMA):
6019 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6020 if (target)
6021 return target;
6022 break;
6024 CASE_FLT_FN (BUILT_IN_ILOGB):
6025 if (! flag_unsafe_math_optimizations)
6026 break;
6027 CASE_FLT_FN (BUILT_IN_ISINF):
6028 CASE_FLT_FN (BUILT_IN_FINITE):
6029 case BUILT_IN_ISFINITE:
6030 case BUILT_IN_ISNORMAL:
6031 target = expand_builtin_interclass_mathfn (exp, target);
6032 if (target)
6033 return target;
6034 break;
6036 CASE_FLT_FN (BUILT_IN_ICEIL):
6037 CASE_FLT_FN (BUILT_IN_LCEIL):
6038 CASE_FLT_FN (BUILT_IN_LLCEIL):
6039 CASE_FLT_FN (BUILT_IN_LFLOOR):
6040 CASE_FLT_FN (BUILT_IN_IFLOOR):
6041 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6042 target = expand_builtin_int_roundingfn (exp, target);
6043 if (target)
6044 return target;
6045 break;
6047 CASE_FLT_FN (BUILT_IN_IRINT):
6048 CASE_FLT_FN (BUILT_IN_LRINT):
6049 CASE_FLT_FN (BUILT_IN_LLRINT):
6050 CASE_FLT_FN (BUILT_IN_IROUND):
6051 CASE_FLT_FN (BUILT_IN_LROUND):
6052 CASE_FLT_FN (BUILT_IN_LLROUND):
6053 target = expand_builtin_int_roundingfn_2 (exp, target);
6054 if (target)
6055 return target;
6056 break;
6058 CASE_FLT_FN (BUILT_IN_POWI):
6059 target = expand_builtin_powi (exp, target);
6060 if (target)
6061 return target;
6062 break;
6064 CASE_FLT_FN (BUILT_IN_ATAN2):
6065 CASE_FLT_FN (BUILT_IN_LDEXP):
6066 CASE_FLT_FN (BUILT_IN_SCALB):
6067 CASE_FLT_FN (BUILT_IN_SCALBN):
6068 CASE_FLT_FN (BUILT_IN_SCALBLN):
6069 if (! flag_unsafe_math_optimizations)
6070 break;
6072 CASE_FLT_FN (BUILT_IN_FMOD):
6073 CASE_FLT_FN (BUILT_IN_REMAINDER):
6074 CASE_FLT_FN (BUILT_IN_DREM):
6075 CASE_FLT_FN (BUILT_IN_POW):
6076 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6077 if (target)
6078 return target;
6079 break;
6081 CASE_FLT_FN (BUILT_IN_CEXPI):
6082 target = expand_builtin_cexpi (exp, target);
6083 gcc_assert (target);
6084 return target;
6086 CASE_FLT_FN (BUILT_IN_SIN):
6087 CASE_FLT_FN (BUILT_IN_COS):
6088 if (! flag_unsafe_math_optimizations)
6089 break;
6090 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_FLT_FN (BUILT_IN_SINCOS):
6096 if (! flag_unsafe_math_optimizations)
6097 break;
6098 target = expand_builtin_sincos (exp);
6099 if (target)
6100 return target;
6101 break;
6103 case BUILT_IN_APPLY_ARGS:
6104 return expand_builtin_apply_args ();
6106 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6107 FUNCTION with a copy of the parameters described by
6108 ARGUMENTS, and ARGSIZE. It returns a block of memory
6109 allocated on the stack into which is stored all the registers
6110 that might possibly be used for returning the result of a
6111 function. ARGUMENTS is the value returned by
6112 __builtin_apply_args. ARGSIZE is the number of bytes of
6113 arguments that must be copied. ??? How should this value be
6114 computed? We'll also need a safe worst case value for varargs
6115 functions. */
6116 case BUILT_IN_APPLY:
6117 if (!validate_arglist (exp, POINTER_TYPE,
6118 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6119 && !validate_arglist (exp, REFERENCE_TYPE,
6120 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6121 return const0_rtx;
6122 else
6124 rtx ops[3];
6126 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6127 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6128 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6130 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6133 /* __builtin_return (RESULT) causes the function to return the
6134 value described by RESULT. RESULT is address of the block of
6135 memory returned by __builtin_apply. */
6136 case BUILT_IN_RETURN:
6137 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6138 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6139 return const0_rtx;
6141 case BUILT_IN_SAVEREGS:
6142 return expand_builtin_saveregs ();
6144 case BUILT_IN_VA_ARG_PACK:
6145 /* All valid uses of __builtin_va_arg_pack () are removed during
6146 inlining. */
6147 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6148 return const0_rtx;
6150 case BUILT_IN_VA_ARG_PACK_LEN:
6151 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6152 inlining. */
6153 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6154 return const0_rtx;
6156 /* Return the address of the first anonymous stack arg. */
6157 case BUILT_IN_NEXT_ARG:
6158 if (fold_builtin_next_arg (exp, false))
6159 return const0_rtx;
6160 return expand_builtin_next_arg ();
6162 case BUILT_IN_CLEAR_CACHE:
6163 target = expand_builtin___clear_cache (exp);
6164 if (target)
6165 return target;
6166 break;
6168 case BUILT_IN_CLASSIFY_TYPE:
6169 return expand_builtin_classify_type (exp);
6171 case BUILT_IN_CONSTANT_P:
6172 return const0_rtx;
6174 case BUILT_IN_FRAME_ADDRESS:
6175 case BUILT_IN_RETURN_ADDRESS:
6176 return expand_builtin_frame_address (fndecl, exp);
6178 /* Returns the address of the area where the structure is returned.
6179 0 otherwise. */
6180 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6181 if (call_expr_nargs (exp) != 0
6182 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6183 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6184 return const0_rtx;
6185 else
6186 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6188 case BUILT_IN_ALLOCA:
6189 case BUILT_IN_ALLOCA_WITH_ALIGN:
6190 /* If the allocation stems from the declaration of a variable-sized
6191 object, it cannot accumulate. */
6192 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6193 if (target)
6194 return target;
6195 break;
6197 case BUILT_IN_STACK_SAVE:
6198 return expand_stack_save ();
6200 case BUILT_IN_STACK_RESTORE:
6201 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6202 return const0_rtx;
6204 case BUILT_IN_BSWAP16:
6205 case BUILT_IN_BSWAP32:
6206 case BUILT_IN_BSWAP64:
6207 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6208 if (target)
6209 return target;
6210 break;
6212 CASE_INT_FN (BUILT_IN_FFS):
6213 target = expand_builtin_unop (target_mode, exp, target,
6214 subtarget, ffs_optab);
6215 if (target)
6216 return target;
6217 break;
6219 CASE_INT_FN (BUILT_IN_CLZ):
6220 target = expand_builtin_unop (target_mode, exp, target,
6221 subtarget, clz_optab);
6222 if (target)
6223 return target;
6224 break;
6226 CASE_INT_FN (BUILT_IN_CTZ):
6227 target = expand_builtin_unop (target_mode, exp, target,
6228 subtarget, ctz_optab);
6229 if (target)
6230 return target;
6231 break;
6233 CASE_INT_FN (BUILT_IN_CLRSB):
6234 target = expand_builtin_unop (target_mode, exp, target,
6235 subtarget, clrsb_optab);
6236 if (target)
6237 return target;
6238 break;
6240 CASE_INT_FN (BUILT_IN_POPCOUNT):
6241 target = expand_builtin_unop (target_mode, exp, target,
6242 subtarget, popcount_optab);
6243 if (target)
6244 return target;
6245 break;
6247 CASE_INT_FN (BUILT_IN_PARITY):
6248 target = expand_builtin_unop (target_mode, exp, target,
6249 subtarget, parity_optab);
6250 if (target)
6251 return target;
6252 break;
6254 case BUILT_IN_STRLEN:
6255 target = expand_builtin_strlen (exp, target, target_mode);
6256 if (target)
6257 return target;
6258 break;
6260 case BUILT_IN_STRCPY:
6261 target = expand_builtin_strcpy (exp, target);
6262 if (target)
6263 return target;
6264 break;
6266 case BUILT_IN_STRNCPY:
6267 target = expand_builtin_strncpy (exp, target);
6268 if (target)
6269 return target;
6270 break;
6272 case BUILT_IN_STPCPY:
6273 target = expand_builtin_stpcpy (exp, target, mode);
6274 if (target)
6275 return target;
6276 break;
6278 case BUILT_IN_MEMCPY:
6279 target = expand_builtin_memcpy (exp, target);
6280 if (target)
6281 return target;
6282 break;
6284 case BUILT_IN_MEMPCPY:
6285 target = expand_builtin_mempcpy (exp, target, mode);
6286 if (target)
6287 return target;
6288 break;
6290 case BUILT_IN_MEMSET:
6291 target = expand_builtin_memset (exp, target, mode);
6292 if (target)
6293 return target;
6294 break;
6296 case BUILT_IN_BZERO:
6297 target = expand_builtin_bzero (exp);
6298 if (target)
6299 return target;
6300 break;
6302 case BUILT_IN_STRCMP:
6303 target = expand_builtin_strcmp (exp, target);
6304 if (target)
6305 return target;
6306 break;
6308 case BUILT_IN_STRNCMP:
6309 target = expand_builtin_strncmp (exp, target, mode);
6310 if (target)
6311 return target;
6312 break;
6314 case BUILT_IN_BCMP:
6315 case BUILT_IN_MEMCMP:
6316 target = expand_builtin_memcmp (exp, target);
6317 if (target)
6318 return target;
6319 break;
6321 case BUILT_IN_SETJMP:
6322 /* This should have been lowered to the builtins below. */
6323 gcc_unreachable ();
6325 case BUILT_IN_SETJMP_SETUP:
6326 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6327 and the receiver label. */
6328 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6330 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6331 VOIDmode, EXPAND_NORMAL);
6332 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6333 rtx_insn *label_r = label_rtx (label);
6335 /* This is copied from the handling of non-local gotos. */
6336 expand_builtin_setjmp_setup (buf_addr, label_r);
6337 nonlocal_goto_handler_labels
6338 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6339 nonlocal_goto_handler_labels);
6340 /* ??? Do not let expand_label treat us as such since we would
6341 not want to be both on the list of non-local labels and on
6342 the list of forced labels. */
6343 FORCED_LABEL (label) = 0;
6344 return const0_rtx;
6346 break;
6348 case BUILT_IN_SETJMP_RECEIVER:
6349 /* __builtin_setjmp_receiver is passed the receiver label. */
6350 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6352 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6353 rtx_insn *label_r = label_rtx (label);
6355 expand_builtin_setjmp_receiver (label_r);
6356 return const0_rtx;
6358 break;
6360 /* __builtin_longjmp is passed a pointer to an array of five words.
6361 It's similar to the C library longjmp function but works with
6362 __builtin_setjmp above. */
6363 case BUILT_IN_LONGJMP:
6364 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6366 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6367 VOIDmode, EXPAND_NORMAL);
6368 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6370 if (value != const1_rtx)
6372 error ("%<__builtin_longjmp%> second argument must be 1");
6373 return const0_rtx;
6376 expand_builtin_longjmp (buf_addr, value);
6377 return const0_rtx;
6379 break;
6381 case BUILT_IN_NONLOCAL_GOTO:
6382 target = expand_builtin_nonlocal_goto (exp);
6383 if (target)
6384 return target;
6385 break;
6387 /* This updates the setjmp buffer that is its argument with the value
6388 of the current stack pointer. */
6389 case BUILT_IN_UPDATE_SETJMP_BUF:
6390 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6392 rtx buf_addr
6393 = expand_normal (CALL_EXPR_ARG (exp, 0));
6395 expand_builtin_update_setjmp_buf (buf_addr);
6396 return const0_rtx;
6398 break;
6400 case BUILT_IN_TRAP:
6401 expand_builtin_trap ();
6402 return const0_rtx;
6404 case BUILT_IN_UNREACHABLE:
6405 expand_builtin_unreachable ();
6406 return const0_rtx;
6408 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6409 case BUILT_IN_SIGNBITD32:
6410 case BUILT_IN_SIGNBITD64:
6411 case BUILT_IN_SIGNBITD128:
6412 target = expand_builtin_signbit (exp, target);
6413 if (target)
6414 return target;
6415 break;
6417 /* Various hooks for the DWARF 2 __throw routine. */
6418 case BUILT_IN_UNWIND_INIT:
6419 expand_builtin_unwind_init ();
6420 return const0_rtx;
6421 case BUILT_IN_DWARF_CFA:
6422 return virtual_cfa_rtx;
6423 #ifdef DWARF2_UNWIND_INFO
6424 case BUILT_IN_DWARF_SP_COLUMN:
6425 return expand_builtin_dwarf_sp_column ();
6426 case BUILT_IN_INIT_DWARF_REG_SIZES:
6427 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6428 return const0_rtx;
6429 #endif
6430 case BUILT_IN_FROB_RETURN_ADDR:
6431 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6432 case BUILT_IN_EXTRACT_RETURN_ADDR:
6433 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6434 case BUILT_IN_EH_RETURN:
6435 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6436 CALL_EXPR_ARG (exp, 1));
6437 return const0_rtx;
6438 case BUILT_IN_EH_RETURN_DATA_REGNO:
6439 return expand_builtin_eh_return_data_regno (exp);
6440 case BUILT_IN_EXTEND_POINTER:
6441 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6442 case BUILT_IN_EH_POINTER:
6443 return expand_builtin_eh_pointer (exp);
6444 case BUILT_IN_EH_FILTER:
6445 return expand_builtin_eh_filter (exp);
6446 case BUILT_IN_EH_COPY_VALUES:
6447 return expand_builtin_eh_copy_values (exp);
6449 case BUILT_IN_VA_START:
6450 return expand_builtin_va_start (exp);
6451 case BUILT_IN_VA_END:
6452 return expand_builtin_va_end (exp);
6453 case BUILT_IN_VA_COPY:
6454 return expand_builtin_va_copy (exp);
6455 case BUILT_IN_EXPECT:
6456 return expand_builtin_expect (exp, target);
6457 case BUILT_IN_ASSUME_ALIGNED:
6458 return expand_builtin_assume_aligned (exp, target);
6459 case BUILT_IN_PREFETCH:
6460 expand_builtin_prefetch (exp);
6461 return const0_rtx;
6463 case BUILT_IN_INIT_TRAMPOLINE:
6464 return expand_builtin_init_trampoline (exp, true);
6465 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6466 return expand_builtin_init_trampoline (exp, false);
6467 case BUILT_IN_ADJUST_TRAMPOLINE:
6468 return expand_builtin_adjust_trampoline (exp);
6470 case BUILT_IN_FORK:
6471 case BUILT_IN_EXECL:
6472 case BUILT_IN_EXECV:
6473 case BUILT_IN_EXECLP:
6474 case BUILT_IN_EXECLE:
6475 case BUILT_IN_EXECVP:
6476 case BUILT_IN_EXECVE:
6477 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6483 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6484 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6485 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6486 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6488 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6489 if (target)
6490 return target;
6491 break;
6493 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6494 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6495 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6496 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6497 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6499 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6505 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6506 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6507 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6508 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6510 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6511 if (target)
6512 return target;
6513 break;
6515 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6516 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6517 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6518 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6519 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6521 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6527 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6528 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6529 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6530 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6532 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6533 if (target)
6534 return target;
6535 break;
6537 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6538 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6539 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6540 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6541 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6543 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6544 if (target)
6545 return target;
6546 break;
6548 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6549 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6550 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6551 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6552 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6555 if (target)
6556 return target;
6557 break;
6559 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6560 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6561 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6562 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6563 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6566 if (target)
6567 return target;
6568 break;
6570 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6571 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6572 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6573 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6574 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6577 if (target)
6578 return target;
6579 break;
6581 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6582 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6583 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6584 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6585 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6593 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6594 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6595 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6596 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6599 if (target)
6600 return target;
6601 break;
6603 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6604 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6605 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6606 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6607 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6609 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6610 if (target)
6611 return target;
6612 break;
6614 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6615 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6616 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6617 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6618 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6619 if (mode == VOIDmode)
6620 mode = TYPE_MODE (boolean_type_node);
6621 if (!target || !register_operand (target, mode))
6622 target = gen_reg_rtx (mode);
6624 mode = get_builtin_sync_mode
6625 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6626 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6627 if (target)
6628 return target;
6629 break;
6631 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6632 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6633 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6634 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6635 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6636 mode = get_builtin_sync_mode
6637 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6638 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6639 if (target)
6640 return target;
6641 break;
6643 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6644 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6645 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6646 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6647 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6649 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6650 if (target)
6651 return target;
6652 break;
6654 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6655 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6656 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6657 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6658 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6659 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6660 expand_builtin_sync_lock_release (mode, exp);
6661 return const0_rtx;
6663 case BUILT_IN_SYNC_SYNCHRONIZE:
6664 expand_builtin_sync_synchronize ();
6665 return const0_rtx;
6667 case BUILT_IN_ATOMIC_EXCHANGE_1:
6668 case BUILT_IN_ATOMIC_EXCHANGE_2:
6669 case BUILT_IN_ATOMIC_EXCHANGE_4:
6670 case BUILT_IN_ATOMIC_EXCHANGE_8:
6671 case BUILT_IN_ATOMIC_EXCHANGE_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6673 target = expand_builtin_atomic_exchange (mode, exp, target);
6674 if (target)
6675 return target;
6676 break;
6678 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6679 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6680 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6681 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6682 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6684 unsigned int nargs, z;
6685 vec<tree, va_gc> *vec;
6687 mode =
6688 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6689 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6690 if (target)
6691 return target;
6693 /* If this is turned into an external library call, the weak parameter
6694 must be dropped to match the expected parameter list. */
6695 nargs = call_expr_nargs (exp);
6696 vec_alloc (vec, nargs - 1);
6697 for (z = 0; z < 3; z++)
6698 vec->quick_push (CALL_EXPR_ARG (exp, z));
6699 /* Skip the boolean weak parameter. */
6700 for (z = 4; z < 6; z++)
6701 vec->quick_push (CALL_EXPR_ARG (exp, z));
6702 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6703 break;
6706 case BUILT_IN_ATOMIC_LOAD_1:
6707 case BUILT_IN_ATOMIC_LOAD_2:
6708 case BUILT_IN_ATOMIC_LOAD_4:
6709 case BUILT_IN_ATOMIC_LOAD_8:
6710 case BUILT_IN_ATOMIC_LOAD_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6712 target = expand_builtin_atomic_load (mode, exp, target);
6713 if (target)
6714 return target;
6715 break;
6717 case BUILT_IN_ATOMIC_STORE_1:
6718 case BUILT_IN_ATOMIC_STORE_2:
6719 case BUILT_IN_ATOMIC_STORE_4:
6720 case BUILT_IN_ATOMIC_STORE_8:
6721 case BUILT_IN_ATOMIC_STORE_16:
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6723 target = expand_builtin_atomic_store (mode, exp);
6724 if (target)
6725 return const0_rtx;
6726 break;
6728 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6729 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6730 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6731 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6732 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6734 enum built_in_function lib;
6735 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6736 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6737 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6738 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6739 ignore, lib);
6740 if (target)
6741 return target;
6742 break;
6744 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6745 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6746 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6747 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6748 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6750 enum built_in_function lib;
6751 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6752 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6753 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6755 ignore, lib);
6756 if (target)
6757 return target;
6758 break;
6760 case BUILT_IN_ATOMIC_AND_FETCH_1:
6761 case BUILT_IN_ATOMIC_AND_FETCH_2:
6762 case BUILT_IN_ATOMIC_AND_FETCH_4:
6763 case BUILT_IN_ATOMIC_AND_FETCH_8:
6764 case BUILT_IN_ATOMIC_AND_FETCH_16:
6766 enum built_in_function lib;
6767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6768 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6769 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6770 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6771 ignore, lib);
6772 if (target)
6773 return target;
6774 break;
6776 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6777 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6778 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6779 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6780 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6782 enum built_in_function lib;
6783 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6784 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6785 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6786 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6787 ignore, lib);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6793 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6794 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6795 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6796 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6798 enum built_in_function lib;
6799 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6800 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6801 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6802 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6803 ignore, lib);
6804 if (target)
6805 return target;
6806 break;
6808 case BUILT_IN_ATOMIC_OR_FETCH_1:
6809 case BUILT_IN_ATOMIC_OR_FETCH_2:
6810 case BUILT_IN_ATOMIC_OR_FETCH_4:
6811 case BUILT_IN_ATOMIC_OR_FETCH_8:
6812 case BUILT_IN_ATOMIC_OR_FETCH_16:
6814 enum built_in_function lib;
6815 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6816 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6817 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6818 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6819 ignore, lib);
6820 if (target)
6821 return target;
6822 break;
6824 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6825 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6826 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6827 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6828 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6829 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6830 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6831 ignore, BUILT_IN_NONE);
6832 if (target)
6833 return target;
6834 break;
6836 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6837 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6838 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6839 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6840 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6842 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6843 ignore, BUILT_IN_NONE);
6844 if (target)
6845 return target;
6846 break;
6848 case BUILT_IN_ATOMIC_FETCH_AND_1:
6849 case BUILT_IN_ATOMIC_FETCH_AND_2:
6850 case BUILT_IN_ATOMIC_FETCH_AND_4:
6851 case BUILT_IN_ATOMIC_FETCH_AND_8:
6852 case BUILT_IN_ATOMIC_FETCH_AND_16:
6853 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6854 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6855 ignore, BUILT_IN_NONE);
6856 if (target)
6857 return target;
6858 break;
6860 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6861 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6862 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6863 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6864 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6865 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6866 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6867 ignore, BUILT_IN_NONE);
6868 if (target)
6869 return target;
6870 break;
6872 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6873 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6874 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6875 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6876 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6877 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6878 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6879 ignore, BUILT_IN_NONE);
6880 if (target)
6881 return target;
6882 break;
6884 case BUILT_IN_ATOMIC_FETCH_OR_1:
6885 case BUILT_IN_ATOMIC_FETCH_OR_2:
6886 case BUILT_IN_ATOMIC_FETCH_OR_4:
6887 case BUILT_IN_ATOMIC_FETCH_OR_8:
6888 case BUILT_IN_ATOMIC_FETCH_OR_16:
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6890 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6891 ignore, BUILT_IN_NONE);
6892 if (target)
6893 return target;
6894 break;
6896 case BUILT_IN_ATOMIC_TEST_AND_SET:
6897 return expand_builtin_atomic_test_and_set (exp, target);
6899 case BUILT_IN_ATOMIC_CLEAR:
6900 return expand_builtin_atomic_clear (exp);
6902 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6903 return expand_builtin_atomic_always_lock_free (exp);
6905 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6906 target = expand_builtin_atomic_is_lock_free (exp);
6907 if (target)
6908 return target;
6909 break;
6911 case BUILT_IN_ATOMIC_THREAD_FENCE:
6912 expand_builtin_atomic_thread_fence (exp);
6913 return const0_rtx;
6915 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6916 expand_builtin_atomic_signal_fence (exp);
6917 return const0_rtx;
6919 case BUILT_IN_OBJECT_SIZE:
6920 return expand_builtin_object_size (exp);
6922 case BUILT_IN_MEMCPY_CHK:
6923 case BUILT_IN_MEMPCPY_CHK:
6924 case BUILT_IN_MEMMOVE_CHK:
6925 case BUILT_IN_MEMSET_CHK:
6926 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6927 if (target)
6928 return target;
6929 break;
6931 case BUILT_IN_STRCPY_CHK:
6932 case BUILT_IN_STPCPY_CHK:
6933 case BUILT_IN_STRNCPY_CHK:
6934 case BUILT_IN_STPNCPY_CHK:
6935 case BUILT_IN_STRCAT_CHK:
6936 case BUILT_IN_STRNCAT_CHK:
6937 case BUILT_IN_SNPRINTF_CHK:
6938 case BUILT_IN_VSNPRINTF_CHK:
6939 maybe_emit_chk_warning (exp, fcode);
6940 break;
6942 case BUILT_IN_SPRINTF_CHK:
6943 case BUILT_IN_VSPRINTF_CHK:
6944 maybe_emit_sprintf_chk_warning (exp, fcode);
6945 break;
6947 case BUILT_IN_FREE:
6948 if (warn_free_nonheap_object)
6949 maybe_emit_free_warning (exp);
6950 break;
6952 case BUILT_IN_THREAD_POINTER:
6953 return expand_builtin_thread_pointer (exp, target);
6955 case BUILT_IN_SET_THREAD_POINTER:
6956 expand_builtin_set_thread_pointer (exp);
6957 return const0_rtx;
6959 case BUILT_IN_CILK_DETACH:
6960 expand_builtin_cilk_detach (exp);
6961 return const0_rtx;
6963 case BUILT_IN_CILK_POP_FRAME:
6964 expand_builtin_cilk_pop_frame (exp);
6965 return const0_rtx;
6967 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6968 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6969 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6970 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6971 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6972 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6973 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6974 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6975 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6976 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6977 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6978 /* We allow user CHKP builtins if Pointer Bounds
6979 Checker is off. */
6980 if (!chkp_function_instrumented_p (current_function_decl))
6982 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6983 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6984 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6985 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6986 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6987 return expand_normal (CALL_EXPR_ARG (exp, 0));
6988 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6989 return expand_normal (size_zero_node);
6990 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6991 return expand_normal (size_int (-1));
6992 else
6993 return const0_rtx;
6995 /* FALLTHROUGH */
6997 case BUILT_IN_CHKP_BNDMK:
6998 case BUILT_IN_CHKP_BNDSTX:
6999 case BUILT_IN_CHKP_BNDCL:
7000 case BUILT_IN_CHKP_BNDCU:
7001 case BUILT_IN_CHKP_BNDLDX:
7002 case BUILT_IN_CHKP_BNDRET:
7003 case BUILT_IN_CHKP_INTERSECT:
7004 case BUILT_IN_CHKP_NARROW:
7005 case BUILT_IN_CHKP_EXTRACT_LOWER:
7006 case BUILT_IN_CHKP_EXTRACT_UPPER:
7007 /* A software implementation of Pointer Bounds Checker is not yet
7008 implemented; target support is required. */
7009 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7010 break;
7012 case BUILT_IN_ACC_ON_DEVICE:
7013 /* Do a library call if we failed to expand the builtin when
7014 folding. */
7015 break;
7017 default: /* Just do a library call for an unknown builtin. */
7018 break;
7021 /* The switch statement above can drop through to cause the function
7022 to be called normally. */
7023 return expand_call (exp, target, ignore);
7026 /* Similar to expand_builtin but is used for instrumented calls. */
7028 rtx
7029 expand_builtin_with_bounds (tree exp, rtx target,
7030 rtx subtarget ATTRIBUTE_UNUSED,
7031 machine_mode mode, int ignore)
7033 tree fndecl = get_callee_fndecl (exp);
7034 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7036 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7038 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7039 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7041 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7042 && fcode < END_CHKP_BUILTINS);
7044 switch (fcode)
7046 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7047 target = expand_builtin_memcpy_with_bounds (exp, target);
7048 if (target)
7049 return target;
7050 break;
7052 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7053 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7054 if (target)
7055 return target;
7056 break;
7058 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7059 target = expand_builtin_memset_with_bounds (exp, target, mode);
7060 if (target)
7061 return target;
7062 break;
7064 default:
7065 break;
7068 /* The switch statement above can drop through to cause the function
7069 to be called normally. */
7070 return expand_call (exp, target, ignore);
7073 /* Determine whether a tree node represents a call to a built-in
7074 function. If the tree T is a call to a built-in function with
7075 the right number of arguments of the appropriate types, return
7076 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7077 Otherwise the return value is END_BUILTINS. */
7079 enum built_in_function
7080 builtin_mathfn_code (const_tree t)
7082 const_tree fndecl, arg, parmlist;
7083 const_tree argtype, parmtype;
7084 const_call_expr_arg_iterator iter;
7086 if (TREE_CODE (t) != CALL_EXPR
7087 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7088 return END_BUILTINS;
7090 fndecl = get_callee_fndecl (t);
7091 if (fndecl == NULL_TREE
7092 || TREE_CODE (fndecl) != FUNCTION_DECL
7093 || ! DECL_BUILT_IN (fndecl)
7094 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7095 return END_BUILTINS;
7097 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7098 init_const_call_expr_arg_iterator (t, &iter);
7099 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7101 /* If a function doesn't take a variable number of arguments,
7102 the last element in the list will have type `void'. */
7103 parmtype = TREE_VALUE (parmlist);
7104 if (VOID_TYPE_P (parmtype))
7106 if (more_const_call_expr_args_p (&iter))
7107 return END_BUILTINS;
7108 return DECL_FUNCTION_CODE (fndecl);
7111 if (! more_const_call_expr_args_p (&iter))
7112 return END_BUILTINS;
7114 arg = next_const_call_expr_arg (&iter);
7115 argtype = TREE_TYPE (arg);
7117 if (SCALAR_FLOAT_TYPE_P (parmtype))
7119 if (! SCALAR_FLOAT_TYPE_P (argtype))
7120 return END_BUILTINS;
7122 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7124 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7125 return END_BUILTINS;
7127 else if (POINTER_TYPE_P (parmtype))
7129 if (! POINTER_TYPE_P (argtype))
7130 return END_BUILTINS;
7132 else if (INTEGRAL_TYPE_P (parmtype))
7134 if (! INTEGRAL_TYPE_P (argtype))
7135 return END_BUILTINS;
7137 else
7138 return END_BUILTINS;
7141 /* Variable-length argument list. */
7142 return DECL_FUNCTION_CODE (fndecl);
7145 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7146 evaluate to a constant. */
7148 static tree
7149 fold_builtin_constant_p (tree arg)
7151 /* We return 1 for a numeric type that's known to be a constant
7152 value at compile-time or for an aggregate type that's a
7153 literal constant. */
7154 STRIP_NOPS (arg);
7156 /* If we know this is a constant, return integer one. */
7157 if (CONSTANT_CLASS_P (arg)
7158 || (TREE_CODE (arg) == CONSTRUCTOR
7159 && TREE_CONSTANT (arg)))
7160 return integer_one_node;
7161 if (TREE_CODE (arg) == ADDR_EXPR)
7163 tree op = TREE_OPERAND (arg, 0);
7164 if (TREE_CODE (op) == STRING_CST
7165 || (TREE_CODE (op) == ARRAY_REF
7166 && integer_zerop (TREE_OPERAND (op, 1))
7167 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7168 return integer_one_node;
7171 /* If this expression has side effects, show we don't know it to be a
7172 constant. Likewise if it's a pointer or aggregate type, since in
7173 those cases we only want literals, because those are only optimized
7174 when generating RTL, not later.
7175 And finally, if we are compiling an initializer, not code, we
7176 need to return a definite result now; there's not going to be any
7177 more optimization done. */
7178 if (TREE_SIDE_EFFECTS (arg)
7179 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7180 || POINTER_TYPE_P (TREE_TYPE (arg))
7181 || cfun == 0
7182 || folding_initializer
7183 || force_folding_builtin_constant_p)
7184 return integer_zero_node;
7186 return NULL_TREE;
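/* Illustrative sketches of the rules above (hypothetical calls, not
   from this file):

     __builtin_constant_p ("abc")  -> 1   (address of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (has side effects)
     __builtin_constant_p (x)      -> NULL_TREE, i.e. deferred; the
                                      caller turns this into 0 when
                                      not optimizing. */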
7189 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7190 return it as a truthvalue. */
7192 static tree
7193 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7194 tree predictor)
7196 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7198 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7199 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7200 ret_type = TREE_TYPE (TREE_TYPE (fn));
7201 pred_type = TREE_VALUE (arg_types);
7202 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7204 pred = fold_convert_loc (loc, pred_type, pred);
7205 expected = fold_convert_loc (loc, expected_type, expected);
7206 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7207 predictor);
7209 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7210 build_int_cst (ret_type, 0));
7213 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7214 NULL_TREE if no simplification is possible. */
7216 tree
7217 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7219 tree inner, fndecl, inner_arg0;
7220 enum tree_code code;
7222 /* Distribute the expected value over short-circuiting operators.
7223 See through the cast from truthvalue_type_node to long. */
7224 inner_arg0 = arg0;
7225 while (CONVERT_EXPR_P (inner_arg0)
7226 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7227 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7228 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7230 /* If this is a builtin_expect within a builtin_expect, keep the
7231 inner one. See through a comparison against a constant. It
7232 might have been added to create a truthvalue. */
7233 inner = inner_arg0;
7235 if (COMPARISON_CLASS_P (inner)
7236 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7237 inner = TREE_OPERAND (inner, 0);
7239 if (TREE_CODE (inner) == CALL_EXPR
7240 && (fndecl = get_callee_fndecl (inner))
7241 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7242 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7243 return arg0;
7245 inner = inner_arg0;
7246 code = TREE_CODE (inner);
7247 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7249 tree op0 = TREE_OPERAND (inner, 0);
7250 tree op1 = TREE_OPERAND (inner, 1);
7252 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7253 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7254 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7256 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7259 /* If the argument isn't invariant then there's nothing else we can do. */
7260 if (!TREE_CONSTANT (inner_arg0))
7261 return NULL_TREE;
7263 /* If we expect that a comparison against the argument will fold to
7264 a constant return the constant. In practice, this means a true
7265 constant or the address of a non-weak symbol. */
7266 inner = inner_arg0;
7267 STRIP_NOPS (inner);
7268 if (TREE_CODE (inner) == ADDR_EXPR)
7272 do inner = TREE_OPERAND (inner, 0);
7274 while (TREE_CODE (inner) == COMPONENT_REF
7275 || TREE_CODE (inner) == ARRAY_REF);
7276 if ((TREE_CODE (inner) == VAR_DECL
7277 || TREE_CODE (inner) == FUNCTION_DECL)
7278 && DECL_WEAK (inner))
7279 return NULL_TREE;
7282 /* Otherwise, ARG0 already has the proper type for the return value. */
7283 return arg0;
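/* A sketch of the distribution over short-circuit operators above
   (hypothetical source):

     __builtin_expect (a && b, 1)

   becomes roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the TRUTH_ANDIF_EXPR carries its own prediction. */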
7286 /* Fold a call to __builtin_classify_type with argument ARG. */
7288 static tree
7289 fold_builtin_classify_type (tree arg)
7291 if (arg == 0)
7292 return build_int_cst (integer_type_node, no_type_class);
7294 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7297 /* Fold a call to __builtin_strlen with argument ARG. */
7299 static tree
7300 fold_builtin_strlen (location_t loc, tree type, tree arg)
7302 if (!validate_arg (arg, POINTER_TYPE))
7303 return NULL_TREE;
7304 else
7306 tree len = c_strlen (arg, 0);
7308 if (len)
7309 return fold_convert_loc (loc, type, len);
7311 return NULL_TREE;
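/* For example, when ARG points to a string literal, c_strlen computes
   the result at compile time:

     strlen ("hello")  -> (size_t) 5

   Arguments c_strlen cannot see through are left alone. */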
7315 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7317 static tree
7318 fold_builtin_inf (location_t loc, tree type, int warn)
7320 REAL_VALUE_TYPE real;
7322 /* __builtin_inff is intended to be usable to define INFINITY on all
7323 targets. If an infinity is not available, INFINITY expands "to a
7324 positive constant of type float that overflows at translation
7325 time", footnote "In this case, using INFINITY will violate the
7326 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7327 Thus we pedwarn to ensure this constraint violation is
7328 diagnosed. */
7329 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7330 pedwarn (loc, 0, "target format does not support infinity");
7332 real_inf (&real);
7333 return build_real (type, real);
7336 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7337 NULL_TREE if no simplification can be made. */
7339 static tree
7340 fold_builtin_sincos (location_t loc,
7341 tree arg0, tree arg1, tree arg2)
7343 tree type;
7344 tree fndecl, call = NULL_TREE;
7346 if (!validate_arg (arg0, REAL_TYPE)
7347 || !validate_arg (arg1, POINTER_TYPE)
7348 || !validate_arg (arg2, POINTER_TYPE))
7349 return NULL_TREE;
7351 type = TREE_TYPE (arg0);
7353 /* Calculate the result when the argument is a constant. */
7354 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7355 if (fn == END_BUILTINS)
7356 return NULL_TREE;
7358 /* Canonicalize sincos to cexpi. */
7359 if (TREE_CODE (arg0) == REAL_CST)
7361 tree complex_type = build_complex_type (type);
7362 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7364 if (!call)
7366 if (!targetm.libc_has_function (function_c99_math_complex)
7367 || !builtin_decl_implicit_p (fn))
7368 return NULL_TREE;
7369 fndecl = builtin_decl_explicit (fn);
7370 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7371 call = builtin_save_expr (call);
7374 return build2 (COMPOUND_EXPR, void_type_node,
7375 build2 (MODIFY_EXPR, void_type_node,
7376 build_fold_indirect_ref_loc (loc, arg1),
7377 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7378 build2 (MODIFY_EXPR, void_type_node,
7379 build_fold_indirect_ref_loc (loc, arg2),
7380 fold_build1_loc (loc, REALPART_EXPR, type, call)));
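/* The net effect is the canonicalization (a sketch, assuming a C99
   complex-math libc so that cexpi is usable):

     sincos (x, &s, &c)  ->  tmp = cexpi (x),
                             s = __imag__ tmp, c = __real__ tmp

   which lets later passes CSE a single cexpi call. */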
7383 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7384 arguments to the call, and TYPE is its return type.
7385 Return NULL_TREE if no simplification can be made. */
7387 static tree
7388 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7390 if (!validate_arg (arg1, POINTER_TYPE)
7391 || !validate_arg (arg2, INTEGER_TYPE)
7392 || !validate_arg (len, INTEGER_TYPE))
7393 return NULL_TREE;
7394 else
7396 const char *p1;
7398 if (TREE_CODE (arg2) != INTEGER_CST
7399 || !tree_fits_uhwi_p (len))
7400 return NULL_TREE;
7402 p1 = c_getstr (arg1);
7403 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7405 char c;
7406 const char *r;
7407 tree tem;
7409 if (target_char_cast (arg2, &c))
7410 return NULL_TREE;
7412 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7414 if (r == NULL)
7415 return build_int_cst (TREE_TYPE (arg1), 0);
7417 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7418 return fold_convert_loc (loc, type, tem);
7420 return NULL_TREE;
7424 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7425 Return NULL_TREE if no simplification can be made. */
7427 static tree
7428 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7430 if (!validate_arg (arg1, POINTER_TYPE)
7431 || !validate_arg (arg2, POINTER_TYPE)
7432 || !validate_arg (len, INTEGER_TYPE))
7433 return NULL_TREE;
7435 /* If the LEN parameter is zero, return zero. */
7436 if (integer_zerop (len))
7437 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7438 arg1, arg2);
7440 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7441 if (operand_equal_p (arg1, arg2, 0))
7442 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7444 /* If the LEN parameter is one, return an expression corresponding to
7445 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7446 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7448 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7449 tree cst_uchar_ptr_node
7450 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7452 tree ind1
7453 = fold_convert_loc (loc, integer_type_node,
7454 build1 (INDIRECT_REF, cst_uchar_node,
7455 fold_convert_loc (loc,
7456 cst_uchar_ptr_node,
7457 arg1)));
7458 tree ind2
7459 = fold_convert_loc (loc, integer_type_node,
7460 build1 (INDIRECT_REF, cst_uchar_node,
7461 fold_convert_loc (loc,
7462 cst_uchar_ptr_node,
7463 arg2)));
7464 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7467 return NULL_TREE;
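/* Sketches of the three simplifications above:

     memcmp (p, q, 0)  -> 0, still evaluating p and q for side effects
     memcmp (p, p, n)  -> 0, still evaluating n
     memcmp (p, q, 1)  -> *(const unsigned char *) p
                          - *(const unsigned char *) q  */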
7470 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7471 Return NULL_TREE if no simplification can be made. */
7473 static tree
7474 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7476 if (!validate_arg (arg1, POINTER_TYPE)
7477 || !validate_arg (arg2, POINTER_TYPE))
7478 return NULL_TREE;
7480 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7481 if (operand_equal_p (arg1, arg2, 0))
7482 return integer_zero_node;
7484 /* If the second arg is "", return *(const unsigned char*)arg1. */
7485 const char *p2 = c_getstr (arg2);
7486 if (p2 && *p2 == '\0')
7488 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7489 tree cst_uchar_ptr_node
7490 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7492 return fold_convert_loc (loc, integer_type_node,
7493 build1 (INDIRECT_REF, cst_uchar_node,
7494 fold_convert_loc (loc,
7495 cst_uchar_ptr_node,
7496 arg1)));
7499 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7500 const char *p1 = c_getstr (arg1);
7501 if (p1 && *p1 == '\0')
7503 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7504 tree cst_uchar_ptr_node
7505 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7507 tree temp
7508 = fold_convert_loc (loc, integer_type_node,
7509 build1 (INDIRECT_REF, cst_uchar_node,
7510 fold_convert_loc (loc,
7511 cst_uchar_ptr_node,
7512 arg2)));
7513 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7516 return NULL_TREE;
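/* Sketches of the simplifications above:

     strcmp (p, p)   -> 0
     strcmp (p, "")  -> *(const unsigned char *) p
     strcmp ("", q)  -> -*(const unsigned char *) q  */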
7519 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7520 Return NULL_TREE if no simplification can be made. */
7522 static tree
7523 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7525 if (!validate_arg (arg1, POINTER_TYPE)
7526 || !validate_arg (arg2, POINTER_TYPE)
7527 || !validate_arg (len, INTEGER_TYPE))
7528 return NULL_TREE;
7530 /* If the LEN parameter is zero, return zero. */
7531 if (integer_zerop (len))
7532 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7533 arg1, arg2);
7535 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7536 if (operand_equal_p (arg1, arg2, 0))
7537 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7539 /* If the second arg is "", and the length is greater than zero,
7540 return *(const unsigned char*)arg1. */
7541 const char *p2 = c_getstr (arg2);
7542 if (p2 && *p2 == '\0'
7543 && TREE_CODE (len) == INTEGER_CST
7544 && tree_int_cst_sgn (len) == 1)
7546 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7547 tree cst_uchar_ptr_node
7548 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7550 return fold_convert_loc (loc, integer_type_node,
7551 build1 (INDIRECT_REF, cst_uchar_node,
7552 fold_convert_loc (loc,
7553 cst_uchar_ptr_node,
7554 arg1)));
7557 /* If the first arg is "", and the length is greater than zero,
7558 return -*(const unsigned char*)arg2. */
7559 const char *p1 = c_getstr (arg1);
7560 if (p1 && *p1 == '\0'
7561 && TREE_CODE (len) == INTEGER_CST
7562 && tree_int_cst_sgn (len) == 1)
7564 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7565 tree cst_uchar_ptr_node
7566 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7568 tree temp = fold_convert_loc (loc, integer_type_node,
7569 build1 (INDIRECT_REF, cst_uchar_node,
7570 fold_convert_loc (loc,
7571 cst_uchar_ptr_node,
7572 arg2)));
7573 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7576 /* If the LEN parameter is one, return an expression corresponding to
7577 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7578 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7580 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7581 tree cst_uchar_ptr_node
7582 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7584 tree ind1 = fold_convert_loc (loc, integer_type_node,
7585 build1 (INDIRECT_REF, cst_uchar_node,
7586 fold_convert_loc (loc,
7587 cst_uchar_ptr_node,
7588 arg1)));
7589 tree ind2 = fold_convert_loc (loc, integer_type_node,
7590 build1 (INDIRECT_REF, cst_uchar_node,
7591 fold_convert_loc (loc,
7592 cst_uchar_ptr_node,
7593 arg2)));
7594 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7597 return NULL_TREE;
7600 /* Fold a call to builtin isascii with argument ARG. */
7602 static tree
7603 fold_builtin_isascii (location_t loc, tree arg)
7605 if (!validate_arg (arg, INTEGER_TYPE))
7606 return NULL_TREE;
7607 else
7609 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7610 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7611 build_int_cst (integer_type_node,
7612 ~ (unsigned HOST_WIDE_INT) 0x7f));
7613 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7614 arg, integer_zero_node);
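/* Worked example of the transformation above:

     isascii (0x41)  -> ((0x41 & ~0x7f) == 0)  -> 1
     isascii (0x80)  -> ((0x80 & ~0x7f) == 0)  -> 0  */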
7618 /* Fold a call to builtin toascii with argument ARG. */
7620 static tree
7621 fold_builtin_toascii (location_t loc, tree arg)
7623 if (!validate_arg (arg, INTEGER_TYPE))
7624 return NULL_TREE;
7626 /* Transform toascii(c) -> (c & 0x7f). */
7627 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7628 build_int_cst (integer_type_node, 0x7f));
7631 /* Fold a call to builtin isdigit with argument ARG. */
7633 static tree
7634 fold_builtin_isdigit (location_t loc, tree arg)
7636 if (!validate_arg (arg, INTEGER_TYPE))
7637 return NULL_TREE;
7638 else
7640 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7641 /* According to the C standard, isdigit is unaffected by locale.
7642 However, it definitely is affected by the target character set. */
7643 unsigned HOST_WIDE_INT target_digit0
7644 = lang_hooks.to_target_charset ('0');
7646 if (target_digit0 == 0)
7647 return NULL_TREE;
7649 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7650 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7651 build_int_cst (unsigned_type_node, target_digit0));
7652 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7653 build_int_cst (unsigned_type_node, 9));
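/* Worked example, assuming the target character set is ASCII so that
   '0' is 0x30 (48):

     isdigit ('7')  -> (unsigned) '7' - 48 <= 9  ->  7 <= 9  -> 1
     isdigit ('a')  -> (unsigned) 'a' - 48 <= 9  -> 49 <= 9  -> 0  */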
7657 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7659 static tree
7660 fold_builtin_fabs (location_t loc, tree arg, tree type)
7662 if (!validate_arg (arg, REAL_TYPE))
7663 return NULL_TREE;
7665 arg = fold_convert_loc (loc, type, arg);
7666 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7669 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7671 static tree
7672 fold_builtin_abs (location_t loc, tree arg, tree type)
7674 if (!validate_arg (arg, INTEGER_TYPE))
7675 return NULL_TREE;
7677 arg = fold_convert_loc (loc, type, arg);
7678 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7681 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7683 static tree
7684 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7686 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7687 if (validate_arg (arg0, REAL_TYPE)
7688 && validate_arg (arg1, REAL_TYPE)
7689 && validate_arg (arg2, REAL_TYPE)
7690 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7691 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7693 return NULL_TREE;
7696 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7698 static tree
7699 fold_builtin_carg (location_t loc, tree arg, tree type)
7701 if (validate_arg (arg, COMPLEX_TYPE)
7702 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7704 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7706 if (atan2_fn)
7708 tree new_arg = builtin_save_expr (arg);
7709 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7710 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7711 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7715 return NULL_TREE;
7718 /* Fold a call to builtin frexp; we can assume the base is 2. */
7720 static tree
7721 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7723 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7724 return NULL_TREE;
7726 STRIP_NOPS (arg0);
7728 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7729 return NULL_TREE;
7731 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7733 /* Proceed if a valid pointer type was passed in. */
7734 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7736 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7737 tree frac, exp;
7739 switch (value->cl)
7741 case rvc_zero:
7742 /* For +-0, return (*exp = 0, +-0). */
7743 exp = integer_zero_node;
7744 frac = arg0;
7745 break;
7746 case rvc_nan:
7747 case rvc_inf:
7748 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7749 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7750 case rvc_normal:
7752 /* Since the frexp function always expects base 2, and in
7753 GCC normalized significands are already in the range
7754 [0.5, 1.0), we have exactly what frexp wants. */
7755 REAL_VALUE_TYPE frac_rvt = *value;
7756 SET_REAL_EXP (&frac_rvt, 0);
7757 frac = build_real (rettype, frac_rvt);
7758 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7760 break;
7761 default:
7762 gcc_unreachable ();
7765 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7766 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7767 TREE_SIDE_EFFECTS (arg1) = 1;
7768 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7771 return NULL_TREE;
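/* Worked example for the rvc_normal case: 4.0 has significand 0.5 and
   exponent 3 (4.0 = 0.5 * 2**3), so

     frexp (4.0, &e)  -> (e = 3, 0.5)

   matching the library contract that the fraction lies in [0.5, 1). */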
7774 /* Fold a call to builtin modf. */
7776 static tree
7777 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7779 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7780 return NULL_TREE;
7782 STRIP_NOPS (arg0);
7784 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7785 return NULL_TREE;
7787 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7789 /* Proceed if a valid pointer type was passed in. */
7790 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7792 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7793 REAL_VALUE_TYPE trunc, frac;
7795 switch (value->cl)
7797 case rvc_nan:
7798 case rvc_zero:
7799 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7800 trunc = frac = *value;
7801 break;
7802 case rvc_inf:
7803 /* For +-Inf, return (*arg1 = arg0, +-0). */
7804 frac = dconst0;
7805 frac.sign = value->sign;
7806 trunc = *value;
7807 break;
7808 case rvc_normal:
7809 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7810 real_trunc (&trunc, VOIDmode, value);
7811 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7812 /* If the original number was negative and already
7813 integral, then the fractional part is -0.0. */
7814 if (value->sign && frac.cl == rvc_zero)
7815 frac.sign = value->sign;
7816 break;
7819 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7820 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7821 build_real (rettype, trunc));
7822 TREE_SIDE_EFFECTS (arg1) = 1;
7823 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7824 build_real (rettype, frac));
7827 return NULL_TREE;
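/* Worked examples for the cases above:

     modf (2.5, &i)   -> (i = 2.0, 0.5)
     modf (-3.0, &i)  -> (i = -3.0, -0.0)   (negative and integral)
     modf (inf, &i)   -> (i = inf, +0.0)  */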
7830 /* Given a location LOC, an interclass builtin function decl FNDECL
7831 and its single argument ARG, return a folded expression computing
7832 the same, or NULL_TREE if we either couldn't or didn't want to fold
7833 (the latter happens if there's an RTL instruction available). */
7835 static tree
7836 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7838 machine_mode mode;
7840 if (!validate_arg (arg, REAL_TYPE))
7841 return NULL_TREE;
7843 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7844 return NULL_TREE;
7846 mode = TYPE_MODE (TREE_TYPE (arg));
7848 /* If there is no optab, try generic code. */
7849 switch (DECL_FUNCTION_CODE (fndecl))
7851 tree result;
7853 CASE_FLT_FN (BUILT_IN_ISINF):
7855 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7856 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7857 tree const type = TREE_TYPE (arg);
7858 REAL_VALUE_TYPE r;
7859 char buf[128];
7861 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7862 real_from_string (&r, buf);
7863 result = build_call_expr (isgr_fn, 2,
7864 fold_build1_loc (loc, ABS_EXPR, type, arg),
7865 build_real (type, r));
7866 return result;
7868 CASE_FLT_FN (BUILT_IN_FINITE):
7869 case BUILT_IN_ISFINITE:
7871 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7872 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7873 tree const type = TREE_TYPE (arg);
7874 REAL_VALUE_TYPE r;
7875 char buf[128];
7877 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7878 real_from_string (&r, buf);
7879 result = build_call_expr (isle_fn, 2,
7880 fold_build1_loc (loc, ABS_EXPR, type, arg),
7881 build_real (type, r));
7882 /*result = fold_build2_loc (loc, UNGT_EXPR,
7883 TREE_TYPE (TREE_TYPE (fndecl)),
7884 fold_build1_loc (loc, ABS_EXPR, type, arg),
7885 build_real (type, r));
7886 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7887 TREE_TYPE (TREE_TYPE (fndecl)),
7888 result);*/
7889 return result;
7891 case BUILT_IN_ISNORMAL:
7893 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7894 islessequal(fabs(x),DBL_MAX). */
7895 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7896 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7897 tree const type = TREE_TYPE (arg);
7898 REAL_VALUE_TYPE rmax, rmin;
7899 char buf[128];
7901 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7902 real_from_string (&rmax, buf);
7903 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7904 real_from_string (&rmin, buf);
7905 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7906 result = build_call_expr (isle_fn, 2, arg,
7907 build_real (type, rmax));
7908 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7909 build_call_expr (isge_fn, 2, arg,
7910 build_real (type, rmin)));
7911 return result;
7913 default:
7914 break;
7917 return NULL_TREE;
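/* For instance, for IEEE double (DBL_MAX = 0x1.fffffffffffffp+1023,
   0x1p-1022 the smallest normal) the generic expansions above read:

     isinf (x)     -> isgreater (fabs (x), DBL_MAX)
     isfinite (x)  -> islessequal (fabs (x), DBL_MAX)
     isnormal (x)  -> islessequal (fabs (x), DBL_MAX)
                      & isgreaterequal (fabs (x), 0x1p-1022)  */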
7920 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7921 FNDECL is the call's function decl; ARG is the argument for the call. */
7923 static tree
7924 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7926 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7928 if (!validate_arg (arg, REAL_TYPE))
7929 return NULL_TREE;
7931 switch (builtin_index)
7933 case BUILT_IN_ISINF:
7934 if (!HONOR_INFINITIES (arg))
7935 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7937 return NULL_TREE;
7939 case BUILT_IN_ISINF_SIGN:
7941 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7942 /* In a boolean context, GCC will fold the inner COND_EXPR to
7943 1. So e.g. "if (isinf_sign(x))" would be folded to just
7944 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7945 tree signbit_fn = mathfn_built_in_1
7946 (TREE_TYPE (arg), CFN_BUILT_IN_SIGNBIT, 0);
7947 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7948 tree tmp = NULL_TREE;
7950 arg = builtin_save_expr (arg);
7952 if (signbit_fn && isinf_fn)
7954 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7955 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7957 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7958 signbit_call, integer_zero_node);
7959 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7960 isinf_call, integer_zero_node);
7962 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7963 integer_minus_one_node, integer_one_node);
7964 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7965 isinf_call, tmp,
7966 integer_zero_node);
7969 return tmp;
7972 case BUILT_IN_ISFINITE:
7973 if (!HONOR_NANS (arg)
7974 && !HONOR_INFINITIES (arg))
7975 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7977 return NULL_TREE;
7979 case BUILT_IN_ISNAN:
7980 if (!HONOR_NANS (arg))
7981 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7983 arg = builtin_save_expr (arg);
7984 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7986 default:
7987 gcc_unreachable ();
7991 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7992 This builtin will generate code to return the appropriate floating
7993 point classification depending on the value of the floating point
7994 number passed in. The possible return values must be supplied as
7995 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7996 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7997 one floating-point argument, which is "type generic". */
7999 static tree
8000 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8002 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8003 arg, type, res, tmp;
8004 machine_mode mode;
8005 REAL_VALUE_TYPE r;
8006 char buf[128];
8008 /* Verify the required arguments in the original call. */
8009 if (nargs != 6
8010 || !validate_arg (args[0], INTEGER_TYPE)
8011 || !validate_arg (args[1], INTEGER_TYPE)
8012 || !validate_arg (args[2], INTEGER_TYPE)
8013 || !validate_arg (args[3], INTEGER_TYPE)
8014 || !validate_arg (args[4], INTEGER_TYPE)
8015 || !validate_arg (args[5], REAL_TYPE))
8016 return NULL_TREE;
8018 fp_nan = args[0];
8019 fp_infinite = args[1];
8020 fp_normal = args[2];
8021 fp_subnormal = args[3];
8022 fp_zero = args[4];
8023 arg = args[5];
8024 type = TREE_TYPE (arg);
8025 mode = TYPE_MODE (type);
8026 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8028 /* fpclassify(x) ->
8029 isnan(x) ? FP_NAN :
8030 (fabs(x) == Inf ? FP_INFINITE :
8031 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8032 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8034 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8035 build_real (type, dconst0));
8036 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8037 tmp, fp_zero, fp_subnormal);
8039 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8040 real_from_string (&r, buf);
8041 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8042 arg, build_real (type, r));
8043 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8045 if (HONOR_INFINITIES (mode))
8047 real_inf (&r);
8048 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8049 build_real (type, r));
8050 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8051 fp_infinite, res);
8054 if (HONOR_NANS (mode))
8056 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8057 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8060 return res;
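/* A typical caller is the C library's type-generic macro; glibc, for
   example, defines roughly

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))

   so the folded result is always one of those five int arguments. */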
8063 /* Fold a call to an unordered comparison function such as
8064 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8065 being called and ARG0 and ARG1 are the arguments for the call.
8066 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8067 the opposite of the desired result. UNORDERED_CODE is used
8068 for modes that can hold NaNs and ORDERED_CODE is used for
8069 the rest. */
8071 static tree
8072 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8073 enum tree_code unordered_code,
8074 enum tree_code ordered_code)
8076 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8077 enum tree_code code;
8078 tree type0, type1;
8079 enum tree_code code0, code1;
8080 tree cmp_type = NULL_TREE;
8082 type0 = TREE_TYPE (arg0);
8083 type1 = TREE_TYPE (arg1);
8085 code0 = TREE_CODE (type0);
8086 code1 = TREE_CODE (type1);
8088 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8089 /* Choose the wider of two real types. */
8090 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8091 ? type0 : type1;
8092 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8093 cmp_type = type0;
8094 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8095 cmp_type = type1;
8097 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8098 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8100 if (unordered_code == UNORDERED_EXPR)
8102 if (!HONOR_NANS (arg0))
8103 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8104 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8107 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8108 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8109 fold_build2_loc (loc, code, type, arg0, arg1));
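/* For example, with NaNs honored the callers below map

     isgreater (x, y)    -> !UNLE_EXPR (x, y)
     isunordered (x, y)  -> UNORDERED_EXPR (x, y)

   while without NaNs isgreater (x, y) is simply !(x <= y). */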
8112 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8113 arithmetic if it can never overflow, or into internal functions that
8114 return both the result of the arithmetic and an overflow boolean flag in
8115 a complex integer result, or some other check for overflow. */
8117 static tree
8118 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8119 tree arg0, tree arg1, tree arg2)
8121 enum internal_fn ifn = IFN_LAST;
8122 tree type = TREE_TYPE (TREE_TYPE (arg2));
8123 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8124 switch (fcode)
8126 case BUILT_IN_ADD_OVERFLOW:
8127 case BUILT_IN_SADD_OVERFLOW:
8128 case BUILT_IN_SADDL_OVERFLOW:
8129 case BUILT_IN_SADDLL_OVERFLOW:
8130 case BUILT_IN_UADD_OVERFLOW:
8131 case BUILT_IN_UADDL_OVERFLOW:
8132 case BUILT_IN_UADDLL_OVERFLOW:
8133 ifn = IFN_ADD_OVERFLOW;
8134 break;
8135 case BUILT_IN_SUB_OVERFLOW:
8136 case BUILT_IN_SSUB_OVERFLOW:
8137 case BUILT_IN_SSUBL_OVERFLOW:
8138 case BUILT_IN_SSUBLL_OVERFLOW:
8139 case BUILT_IN_USUB_OVERFLOW:
8140 case BUILT_IN_USUBL_OVERFLOW:
8141 case BUILT_IN_USUBLL_OVERFLOW:
8142 ifn = IFN_SUB_OVERFLOW;
8143 break;
8144 case BUILT_IN_MUL_OVERFLOW:
8145 case BUILT_IN_SMUL_OVERFLOW:
8146 case BUILT_IN_SMULL_OVERFLOW:
8147 case BUILT_IN_SMULLL_OVERFLOW:
8148 case BUILT_IN_UMUL_OVERFLOW:
8149 case BUILT_IN_UMULL_OVERFLOW:
8150 case BUILT_IN_UMULLL_OVERFLOW:
8151 ifn = IFN_MUL_OVERFLOW;
8152 break;
8153 default:
8154 gcc_unreachable ();
8156 tree ctype = build_complex_type (type);
8157 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8158 2, arg0, arg1);
8159 tree tgt = save_expr (call);
8160 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8161 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8162 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8163 tree store
8164 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8165 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
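/* A sketch of the tree built above for __builtin_add_overflow (a, b, &r):

     tmp = IFN_ADD_OVERFLOW (a, b);   (complex int: {sum, overflowed})
     *&r = REALPART_EXPR <tmp>,
     (_Bool) IMAGPART_EXPR <tmp>      (value of the COMPOUND_EXPR)  */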
8168 /* Fold a call to built-in function FNDECL with 0 arguments.
8169 This function returns NULL_TREE if no simplification was possible. */
8171 static tree
8172 fold_builtin_0 (location_t loc, tree fndecl)
8174 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8175 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8176 switch (fcode)
8178 CASE_FLT_FN (BUILT_IN_INF):
8179 case BUILT_IN_INFD32:
8180 case BUILT_IN_INFD64:
8181 case BUILT_IN_INFD128:
8182 return fold_builtin_inf (loc, type, true);
8184 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8185 return fold_builtin_inf (loc, type, false);
8187 case BUILT_IN_CLASSIFY_TYPE:
8188 return fold_builtin_classify_type (NULL_TREE);
8190 default:
8191 break;
8193 return NULL_TREE;
8196 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8197 This function returns NULL_TREE if no simplification was possible. */
8199 static tree
8200 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8202 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8203 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8205 if (TREE_CODE (arg0) == ERROR_MARK)
8206 return NULL_TREE;
8208 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8209 return ret;
8211 switch (fcode)
8213 case BUILT_IN_CONSTANT_P:
8215 tree val = fold_builtin_constant_p (arg0);
8217 /* Gimplification will pull the CALL_EXPR for the builtin out of
8218 an if condition. When not optimizing, we'll not CSE it back.
8219 To avoid link-error regressions, return false now. */
8220 if (!val && !optimize)
8221 val = integer_zero_node;
8223 return val;
8226 case BUILT_IN_CLASSIFY_TYPE:
8227 return fold_builtin_classify_type (arg0);
8229 case BUILT_IN_STRLEN:
8230 return fold_builtin_strlen (loc, type, arg0);
8232 CASE_FLT_FN (BUILT_IN_FABS):
8233 case BUILT_IN_FABSD32:
8234 case BUILT_IN_FABSD64:
8235 case BUILT_IN_FABSD128:
8236 return fold_builtin_fabs (loc, arg0, type);
8238 case BUILT_IN_ABS:
8239 case BUILT_IN_LABS:
8240 case BUILT_IN_LLABS:
8241 case BUILT_IN_IMAXABS:
8242 return fold_builtin_abs (loc, arg0, type);
8244 CASE_FLT_FN (BUILT_IN_CONJ):
8245 if (validate_arg (arg0, COMPLEX_TYPE)
8246 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8247 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8248 break;
8250 CASE_FLT_FN (BUILT_IN_CREAL):
8251 if (validate_arg (arg0, COMPLEX_TYPE)
8252 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8253 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8254 break;
8256 CASE_FLT_FN (BUILT_IN_CIMAG):
8257 if (validate_arg (arg0, COMPLEX_TYPE)
8258 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8259 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8260 break;
8262 CASE_FLT_FN (BUILT_IN_CARG):
8263 return fold_builtin_carg (loc, arg0, type);
8265 case BUILT_IN_ISASCII:
8266 return fold_builtin_isascii (loc, arg0);
8268 case BUILT_IN_TOASCII:
8269 return fold_builtin_toascii (loc, arg0);
8271 case BUILT_IN_ISDIGIT:
8272 return fold_builtin_isdigit (loc, arg0);
8274 CASE_FLT_FN (BUILT_IN_FINITE):
8275 case BUILT_IN_FINITED32:
8276 case BUILT_IN_FINITED64:
8277 case BUILT_IN_FINITED128:
8278 case BUILT_IN_ISFINITE:
8280 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8281 if (ret)
8282 return ret;
8283 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8286 CASE_FLT_FN (BUILT_IN_ISINF):
8287 case BUILT_IN_ISINFD32:
8288 case BUILT_IN_ISINFD64:
8289 case BUILT_IN_ISINFD128:
8291 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8292 if (ret)
8293 return ret;
8294 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8297 case BUILT_IN_ISNORMAL:
8298 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8300 case BUILT_IN_ISINF_SIGN:
8301 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8303 CASE_FLT_FN (BUILT_IN_ISNAN):
8304 case BUILT_IN_ISNAND32:
8305 case BUILT_IN_ISNAND64:
8306 case BUILT_IN_ISNAND128:
8307 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8309 case BUILT_IN_FREE:
8310 if (integer_zerop (arg0))
8311 return build_empty_stmt (loc);
8312 break;
8314 default:
8315 break;
8318 return NULL_TREE;
8322 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8323 This function returns NULL_TREE if no simplification was possible. */
8325 static tree
8326 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8328 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8329 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8331 if (TREE_CODE (arg0) == ERROR_MARK
8332 || TREE_CODE (arg1) == ERROR_MARK)
8333 return NULL_TREE;
8335 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8336 return ret;
8338 switch (fcode)
8340 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8341 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8342 if (validate_arg (arg0, REAL_TYPE)
8343 && validate_arg (arg1, POINTER_TYPE))
8344 return do_mpfr_lgamma_r (arg0, arg1, type);
8345 break;
8347 CASE_FLT_FN (BUILT_IN_FREXP):
8348 return fold_builtin_frexp (loc, arg0, arg1, type);
8350 CASE_FLT_FN (BUILT_IN_MODF):
8351 return fold_builtin_modf (loc, arg0, arg1, type);
8353 case BUILT_IN_STRSTR:
8354 return fold_builtin_strstr (loc, arg0, arg1, type);
8356 case BUILT_IN_STRSPN:
8357 return fold_builtin_strspn (loc, arg0, arg1);
8359 case BUILT_IN_STRCSPN:
8360 return fold_builtin_strcspn (loc, arg0, arg1);
8362 case BUILT_IN_STRCHR:
8363 case BUILT_IN_INDEX:
8364 return fold_builtin_strchr (loc, arg0, arg1, type);
8366 case BUILT_IN_STRRCHR:
8367 case BUILT_IN_RINDEX:
8368 return fold_builtin_strrchr (loc, arg0, arg1, type);
8370 case BUILT_IN_STRCMP:
8371 return fold_builtin_strcmp (loc, arg0, arg1);
8373 case BUILT_IN_STRPBRK:
8374 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8376 case BUILT_IN_EXPECT:
8377 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8379 case BUILT_IN_ISGREATER:
8380 return fold_builtin_unordered_cmp (loc, fndecl,
8381 arg0, arg1, UNLE_EXPR, LE_EXPR);
8382 case BUILT_IN_ISGREATEREQUAL:
8383 return fold_builtin_unordered_cmp (loc, fndecl,
8384 arg0, arg1, UNLT_EXPR, LT_EXPR);
8385 case BUILT_IN_ISLESS:
8386 return fold_builtin_unordered_cmp (loc, fndecl,
8387 arg0, arg1, UNGE_EXPR, GE_EXPR);
8388 case BUILT_IN_ISLESSEQUAL:
8389 return fold_builtin_unordered_cmp (loc, fndecl,
8390 arg0, arg1, UNGT_EXPR, GT_EXPR);
8391 case BUILT_IN_ISLESSGREATER:
8392 return fold_builtin_unordered_cmp (loc, fndecl,
8393 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8394 case BUILT_IN_ISUNORDERED:
8395 return fold_builtin_unordered_cmp (loc, fndecl,
8396 arg0, arg1, UNORDERED_EXPR,
8397 NOP_EXPR);
8399 /* We do the folding for va_start in the expander. */
8400 case BUILT_IN_VA_START:
8401 break;
8403 case BUILT_IN_OBJECT_SIZE:
8404 return fold_builtin_object_size (arg0, arg1);
8406 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8407 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8409 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8410 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8412 default:
8413 break;
8415 return NULL_TREE;
8418 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8419 and ARG2.
8420 This function returns NULL_TREE if no simplification was possible. */
8422 static tree
8423 fold_builtin_3 (location_t loc, tree fndecl,
8424 tree arg0, tree arg1, tree arg2)
8426 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8427 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8429 if (TREE_CODE (arg0) == ERROR_MARK
8430 || TREE_CODE (arg1) == ERROR_MARK
8431 || TREE_CODE (arg2) == ERROR_MARK)
8432 return NULL_TREE;
8434 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8435 arg0, arg1, arg2))
8436 return ret;
8438 switch (fcode)
8441 CASE_FLT_FN (BUILT_IN_SINCOS):
8442 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8444 CASE_FLT_FN (BUILT_IN_FMA):
8445 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8447 CASE_FLT_FN (BUILT_IN_REMQUO):
8448 if (validate_arg (arg0, REAL_TYPE)
8449 && validate_arg (arg1, REAL_TYPE)
8450 && validate_arg (arg2, POINTER_TYPE))
8451 return do_mpfr_remquo (arg0, arg1, arg2);
8452 break;
8454 case BUILT_IN_STRNCMP:
8455 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8457 case BUILT_IN_MEMCHR:
8458 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8460 case BUILT_IN_BCMP:
8461 case BUILT_IN_MEMCMP:
8462 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8464 case BUILT_IN_EXPECT:
8465 return fold_builtin_expect (loc, arg0, arg1, arg2);
8467 case BUILT_IN_ADD_OVERFLOW:
8468 case BUILT_IN_SUB_OVERFLOW:
8469 case BUILT_IN_MUL_OVERFLOW:
8470 case BUILT_IN_SADD_OVERFLOW:
8471 case BUILT_IN_SADDL_OVERFLOW:
8472 case BUILT_IN_SADDLL_OVERFLOW:
8473 case BUILT_IN_SSUB_OVERFLOW:
8474 case BUILT_IN_SSUBL_OVERFLOW:
8475 case BUILT_IN_SSUBLL_OVERFLOW:
8476 case BUILT_IN_SMUL_OVERFLOW:
8477 case BUILT_IN_SMULL_OVERFLOW:
8478 case BUILT_IN_SMULLL_OVERFLOW:
8479 case BUILT_IN_UADD_OVERFLOW:
8480 case BUILT_IN_UADDL_OVERFLOW:
8481 case BUILT_IN_UADDLL_OVERFLOW:
8482 case BUILT_IN_USUB_OVERFLOW:
8483 case BUILT_IN_USUBL_OVERFLOW:
8484 case BUILT_IN_USUBLL_OVERFLOW:
8485 case BUILT_IN_UMUL_OVERFLOW:
8486 case BUILT_IN_UMULL_OVERFLOW:
8487 case BUILT_IN_UMULLL_OVERFLOW:
8488 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8490 default:
8491 break;
8493 return NULL_TREE;
8496 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8497 arguments. IGNORE is true if the result of the
8498 function call is ignored. This function returns NULL_TREE if no
8499 simplification was possible. */
8501 tree
8502 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8504 tree ret = NULL_TREE;
8506 switch (nargs)
8508 case 0:
8509 ret = fold_builtin_0 (loc, fndecl);
8510 break;
8511 case 1:
8512 ret = fold_builtin_1 (loc, fndecl, args[0]);
8513 break;
8514 case 2:
8515 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8516 break;
8517 case 3:
8518 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8519 break;
8520 default:
8521 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8522 break;
8524 if (ret)
8526 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8527 SET_EXPR_LOCATION (ret, loc);
8528 TREE_NO_WARNING (ret) = 1;
8529 return ret;
8531 return NULL_TREE;
8534 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8535 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8536 of arguments in ARGS to be omitted. OLDNARGS is the number of
8537 elements in ARGS. */
8539 static tree
8540 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8541 int skip, tree fndecl, int n, va_list newargs)
8543 int nargs = oldnargs - skip + n;
8544 tree *buffer;
8546 if (n > 0)
8548 int i, j;
8550 buffer = XALLOCAVEC (tree, nargs);
8551 for (i = 0; i < n; i++)
8552 buffer[i] = va_arg (newargs, tree);
8553 for (j = skip; j < oldnargs; j++, i++)
8554 buffer[i] = args[j];
8556 else
8557 buffer = args + skip;
8559 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8562 /* Return true if FNDECL shouldn't be folded right now.
8563 If a built-in function has an inline attribute always_inline
8564 wrapper, defer folding it after always_inline functions have
8565 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8566 might not be performed. */
8568 bool
8569 avoid_folding_inline_builtin (tree fndecl)
8571 return (DECL_DECLARED_INLINE_P (fndecl)
8572 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8573 && cfun
8574 && !cfun->always_inline_functions_inlined
8575 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8578 /* A wrapper function for builtin folding that prevents warnings for
8579 "statement without effect" and the like, caused by removing the
8580 call node earlier than the warning is generated. */
8582 tree
8583 fold_call_expr (location_t loc, tree exp, bool ignore)
8585 tree ret = NULL_TREE;
8586 tree fndecl = get_callee_fndecl (exp);
8587 if (fndecl
8588 && TREE_CODE (fndecl) == FUNCTION_DECL
8589 && DECL_BUILT_IN (fndecl)
8590 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8591 yet. Defer folding until we see all the arguments
8592 (after inlining). */
8593 && !CALL_EXPR_VA_ARG_PACK (exp))
8595 int nargs = call_expr_nargs (exp);
8597 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8598 instead last argument is __builtin_va_arg_pack (). Defer folding
8599 even in that case, until arguments are finalized. */
8600 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8602 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8603 if (fndecl2
8604 && TREE_CODE (fndecl2) == FUNCTION_DECL
8605 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8606 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8607 return NULL_TREE;
8610 if (avoid_folding_inline_builtin (fndecl))
8611 return NULL_TREE;
8613 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8614 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8615 CALL_EXPR_ARGP (exp), ignore);
8616 else
8618 tree *args = CALL_EXPR_ARGP (exp);
8619 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8620 if (ret)
8621 return ret;
8624 return NULL_TREE;
8627 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8628 N arguments are passed in the array ARGARRAY. Return a folded
8629 expression or NULL_TREE if no simplification was possible. */
8631 tree
8632 fold_builtin_call_array (location_t loc, tree,
8633 tree fn,
8634 int n,
8635 tree *argarray)
8637 if (TREE_CODE (fn) != ADDR_EXPR)
8638 return NULL_TREE;
8640 tree fndecl = TREE_OPERAND (fn, 0);
8641 if (TREE_CODE (fndecl) == FUNCTION_DECL
8642 && DECL_BUILT_IN (fndecl))
8644 /* If last argument is __builtin_va_arg_pack (), arguments to this
8645 function are not finalized yet. Defer folding until they are. */
8646 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8648 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8649 if (fndecl2
8650 && TREE_CODE (fndecl2) == FUNCTION_DECL
8651 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8652 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8653 return NULL_TREE;
8655 if (avoid_folding_inline_builtin (fndecl))
8656 return NULL_TREE;
8657 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8658 return targetm.fold_builtin (fndecl, n, argarray, false);
8659 else
8660 return fold_builtin_n (loc, fndecl, argarray, n, false);
8663 return NULL_TREE;
8666 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8667 along with N new arguments specified as the "..." parameters. SKIP
8668 is the number of arguments in EXP to be omitted. This function is used
8669 to do varargs-to-varargs transformations. */
8671 static tree
8672 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8674 va_list ap;
8675 tree t;
8677 va_start (ap, n);
8678 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8679 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8680 va_end (ap);
8682 return t;
8685 /* Validate a single argument ARG against a tree code CODE representing
8686 a type. */
8688 static bool
8689 validate_arg (const_tree arg, enum tree_code code)
8691 if (!arg)
8692 return false;
8693 else if (code == POINTER_TYPE)
8694 return POINTER_TYPE_P (TREE_TYPE (arg));
8695 else if (code == INTEGER_TYPE)
8696 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8697 return code == TREE_CODE (TREE_TYPE (arg));
8700 /* This function validates the types of a function call argument list
8701 against a specified list of tree_codes. If the last specifier is a 0,
8702 that represents an ellipsis; otherwise the last specifier must be a
8703 VOID_TYPE.
8705 This is the GIMPLE version of validate_arglist. Eventually we want to
8706 completely convert builtins.c to work from GIMPLEs and the tree based
8707 validate_arglist will then be removed. */
8709 bool
8710 validate_gimple_arglist (const gcall *call, ...)
8712 enum tree_code code;
8713 bool res = false;
8714 va_list ap;
8715 const_tree arg;
8716 size_t i;
8718 va_start (ap, call);
8719 i = 0;
8721 do
8723 code = (enum tree_code) va_arg (ap, int);
8724 switch (code)
8726 case 0:
8727 /* This signifies an ellipsis; any further arguments are all ok. */
8728 res = true;
8729 goto end;
8730 case VOID_TYPE:
8731 /* This signifies an endlink, if no arguments remain, return
8732 true, otherwise return false. */
8733 res = (i == gimple_call_num_args (call));
8734 goto end;
8735 default:
8736 /* If no parameters remain or the parameter's code does not
8737 match the specified code, return false. Otherwise continue
8738 checking any remaining arguments. */
8739 arg = gimple_call_arg (call, i++);
8740 if (!validate_arg (arg, code))
8741 goto end;
8742 break;
8745 while (1);
8747 /* We need gotos here since we can only have one VA_CLOSE in a
8748 function. */
8749 end: ;
8750 va_end (ap);
8752 return res;
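/* A typical (hypothetical) call site checks a fixed signature and
   terminates the list with VOID_TYPE:

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_TREE;

   Passing 0 instead of VOID_TYPE accepts any trailing arguments. */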
8755 /* Default target-specific builtin expander that does nothing. */
8757 rtx
8758 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8759 rtx target ATTRIBUTE_UNUSED,
8760 rtx subtarget ATTRIBUTE_UNUSED,
8761 machine_mode mode ATTRIBUTE_UNUSED,
8762 int ignore ATTRIBUTE_UNUSED)
8764 return NULL_RTX;
8767 /* Returns true if EXP represents data that would potentially reside
8768 in a readonly section. */
8770 bool
8771 readonly_data_expr (tree exp)
8773 STRIP_NOPS (exp);
8775 if (TREE_CODE (exp) != ADDR_EXPR)
8776 return false;
8778 exp = get_base_address (TREE_OPERAND (exp, 0));
8779 if (!exp)
8780 return false;
8782 /* Make sure we call decl_readonly_section only for trees it
8783 can handle (since it returns true for everything it doesn't
8784 understand). */
8785 if (TREE_CODE (exp) == STRING_CST
8786 || TREE_CODE (exp) == CONSTRUCTOR
8787 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8788 return decl_readonly_section (exp, 0);
8789 else
8790 return false;
8793 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8794 to the call, and TYPE is its return type.
8796 Return NULL_TREE if no simplification was possible, otherwise return the
8797 simplified form of the call as a tree.
8799 The simplified form may be a constant or other expression which
8800 computes the same value, but in a more efficient manner (including
8801 calls to other builtin functions).
8803 The call may contain arguments which need to be evaluated, but
8804 which are not useful to determine the result of the call. In
8805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8806 COMPOUND_EXPR will be an argument which must be evaluated.
8807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8808 COMPOUND_EXPR in the chain will contain the tree for the simplified
8809 form of the builtin function call. */
8811 static tree
8812 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8814 if (!validate_arg (s1, POINTER_TYPE)
8815 || !validate_arg (s2, POINTER_TYPE))
8816 return NULL_TREE;
8817 else
8819 tree fn;
8820 const char *p1, *p2;
8822 p2 = c_getstr (s2);
8823 if (p2 == NULL)
8824 return NULL_TREE;
8826 p1 = c_getstr (s1);
8827 if (p1 != NULL)
8829 const char *r = strstr (p1, p2);
8830 tree tem;
8832 if (r == NULL)
8833 return build_int_cst (TREE_TYPE (s1), 0);
8835 /* Return an offset into the constant string argument. */
8836 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8837 return fold_convert_loc (loc, type, tem);
8840 /* The argument is const char *, and the result is char *, so we need
8841 a type conversion here to avoid a warning. */
8842 if (p2[0] == '\0')
8843 return fold_convert_loc (loc, type, s1);
8845 if (p2[1] != '\0')
8846 return NULL_TREE;
8848 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8849 if (!fn)
8850 return NULL_TREE;
8852 /* New argument list transforming strstr(s1, s2) to
8853 strchr(s1, s2[0]). */
8854 return build_call_expr_loc (loc, fn, 2, s1,
8855 build_int_cst (integer_type_node, p2[0]));
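/* Sketches of the cases above:

     strstr ("hello", "ll")  -> "hello" + 2
     strstr (s, "")          -> (char *) s
     strstr (s, "l")         -> strchr (s, 'l')  */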
8859 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8860 the call, and TYPE is its return type.
8862 Return NULL_TREE if no simplification was possible, otherwise return the
8863 simplified form of the call as a tree.
8865 The simplified form may be a constant or other expression which
8866 computes the same value, but in a more efficient manner (including
8867 calls to other builtin functions).
8869 The call may contain arguments which need to be evaluated, but
8870 which are not useful to determine the result of the call. In
8871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8872 COMPOUND_EXPR will be an argument which must be evaluated.
8873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8874 COMPOUND_EXPR in the chain will contain the tree for the simplified
8875 form of the builtin function call. */
8877 static tree
8878 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8880 if (!validate_arg (s1, POINTER_TYPE)
8881 || !validate_arg (s2, INTEGER_TYPE))
8882 return NULL_TREE;
8883 else
8885 const char *p1;
8887 if (TREE_CODE (s2) != INTEGER_CST)
8888 return NULL_TREE;
8890 p1 = c_getstr (s1);
8891 if (p1 != NULL)
8893 char c;
8894 const char *r;
8895 tree tem;
8897 if (target_char_cast (s2, &c))
8898 return NULL_TREE;
8900 r = strchr (p1, c);
8902 if (r == NULL)
8903 return build_int_cst (TREE_TYPE (s1), 0);
8905 /* Return an offset into the constant string argument. */
8906 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8907 return fold_convert_loc (loc, type, tem);
8909 return NULL_TREE;
8913 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8914 the call, and TYPE is its return type.
8916 Return NULL_TREE if no simplification was possible, otherwise return the
8917 simplified form of the call as a tree.
8919 The simplified form may be a constant or other expression which
8920 computes the same value, but in a more efficient manner (including
8921 calls to other builtin functions).
8923 The call may contain arguments which need to be evaluated, but
8924 which are not useful to determine the result of the call. In
8925 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8926 COMPOUND_EXPR will be an argument which must be evaluated.
8927 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8928 COMPOUND_EXPR in the chain will contain the tree for the simplified
8929 form of the builtin function call. */
8931 static tree
8932 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8934 if (!validate_arg (s1, POINTER_TYPE)
8935 || !validate_arg (s2, INTEGER_TYPE))
8936 return NULL_TREE;
8937 else
8939 tree fn;
8940 const char *p1;
8942 if (TREE_CODE (s2) != INTEGER_CST)
8943 return NULL_TREE;
8945 p1 = c_getstr (s1);
8946 if (p1 != NULL)
8948 char c;
8949 const char *r;
8950 tree tem;
8952 if (target_char_cast (s2, &c))
8953 return NULL_TREE;
8955 r = strrchr (p1, c);
8957 if (r == NULL)
8958 return build_int_cst (TREE_TYPE (s1), 0);
8960 /* Return an offset into the constant string argument. */
8961 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8962 return fold_convert_loc (loc, type, tem);
8965 if (! integer_zerop (s2))
8966 return NULL_TREE;
8968 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8969 if (!fn)
8970 return NULL_TREE;
8972 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8973 return build_call_expr_loc (loc, fn, 2, s1, s2);
8977 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8978 to the call, and TYPE is its return type.
8980 Return NULL_TREE if no simplification was possible, otherwise return the
8981 simplified form of the call as a tree.
8983 The simplified form may be a constant or other expression which
8984 computes the same value, but in a more efficient manner (including
8985 calls to other builtin functions).
8987 The call may contain arguments which need to be evaluated, but
8988 which are not useful to determine the result of the call. In
8989 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8990 COMPOUND_EXPR will be an argument which must be evaluated.
8991 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8992 COMPOUND_EXPR in the chain will contain the tree for the simplified
8993 form of the builtin function call. */
8995 static tree
8996 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8998 if (!validate_arg (s1, POINTER_TYPE)
8999 || !validate_arg (s2, POINTER_TYPE))
9000 return NULL_TREE;
9001 else
9003 tree fn;
9004 const char *p1, *p2;
9006 p2 = c_getstr (s2);
9007 if (p2 == NULL)
9008 return NULL_TREE;
9010 p1 = c_getstr (s1);
9011 if (p1 != NULL)
9013 const char *r = strpbrk (p1, p2);
9014 tree tem;
9016 if (r == NULL)
9017 return build_int_cst (TREE_TYPE (s1), 0);
9019 /* Return an offset into the constant string argument. */
9020 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9021 return fold_convert_loc (loc, type, tem);
9024 if (p2[0] == '\0')
9025 /* strpbrk(x, "") == NULL.
9026 Evaluate and ignore s1 in case it has side-effects. */
9027 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9029 if (p2[1] != '\0')
9030 return NULL_TREE; /* Really call strpbrk. */
9032 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9033 if (!fn)
9034 return NULL_TREE;
9036 /* New argument list transforming strpbrk(s1, s2) to
9037 strchr(s1, s2[0]). */
9038 return build_call_expr_loc (loc, fn, 2, s1,
9039 build_int_cst (integer_type_node, p2[0]));
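/* Editorial sketch (not part of GCC): the strpbrk rewrites above at the
   source level, for a constant second argument.  An empty set always
   misses; a one-character set degenerates to strchr.  */
#include <assert.h>
#include <string.h>

static void
example_strpbrk_folds (const char *s)
{
  assert (strpbrk (s, "") == NULL);              /* strpbrk (x, "") == NULL */
  assert (strpbrk (s, "a") == strchr (s, 'a'));  /* singleton set */
}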
9043 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9044 to the call.
9046 Return NULL_TREE if no simplification was possible, otherwise return the
9047 simplified form of the call as a tree.
9049 The simplified form may be a constant or other expression which
9050 computes the same value, but in a more efficient manner (including
9051 calls to other builtin functions).
9053 The call may contain arguments which need to be evaluated, but
9054 which are not useful to determine the result of the call. In
9055 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9056 COMPOUND_EXPR will be an argument which must be evaluated.
9057 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9058 COMPOUND_EXPR in the chain will contain the tree for the simplified
9059 form of the builtin function call. */
9061 static tree
9062 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9064 if (!validate_arg (s1, POINTER_TYPE)
9065 || !validate_arg (s2, POINTER_TYPE))
9066 return NULL_TREE;
9067 else
9069 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9071 /* If either argument is "", the result is 0. */
9072 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9073 /* Evaluate and ignore both arguments in case either one has
9074 side-effects. */
9075 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9076 s1, s2);
9077 return NULL_TREE;
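/* Editorial sketch (not part of GCC): the strspn identity used above.
   If either string is empty the initial span is necessarily zero, which
   is why the fold produces size 0 while still evaluating both arguments
   for side effects.  */
#include <assert.h>
#include <string.h>

static void
example_strspn_empty (const char *s)
{
  assert (strspn (s, "") == 0);   /* empty accept set */
  assert (strspn ("", s) == 0);   /* empty subject string */
}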
9081 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9082 to the call.
9084 Return NULL_TREE if no simplification was possible, otherwise return the
9085 simplified form of the call as a tree.
9087 The simplified form may be a constant or other expression which
9088 computes the same value, but in a more efficient manner (including
9089 calls to other builtin functions).
9091 The call may contain arguments which need to be evaluated, but
9092 which are not useful to determine the result of the call. In
9093 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9094 COMPOUND_EXPR will be an argument which must be evaluated.
9095 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9096 COMPOUND_EXPR in the chain will contain the tree for the simplified
9097 form of the builtin function call. */
9099 static tree
9100 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9102 if (!validate_arg (s1, POINTER_TYPE)
9103 || !validate_arg (s2, POINTER_TYPE))
9104 return NULL_TREE;
9105 else
9107 /* If the first argument is "", the result is 0. */
9108 const char *p1 = c_getstr (s1);
9109 if (p1 && *p1 == '\0')
9111 /* Evaluate and ignore argument s2 in case it has
9112 side-effects. */
9113 return omit_one_operand_loc (loc, size_type_node,
9114 size_zero_node, s2);
9117 /* If the second argument is "", return __builtin_strlen(s1). */
9118 const char *p2 = c_getstr (s2);
9119 if (p2 && *p2 == '\0')
9121 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9123 /* If the replacement _DECL isn't initialized, don't do the
9124 transformation. */
9125 if (!fn)
9126 return NULL_TREE;
9128 return build_call_expr_loc (loc, fn, 1, s1);
9130 return NULL_TREE;
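/* Editorial sketch (not part of GCC): the two strcspn identities above.
   An empty subject spans nothing; an empty reject set lets the span run
   to the terminator, i.e. strlen.  */
#include <assert.h>
#include <string.h>

static void
example_strcspn_folds (const char *s)
{
  assert (strcspn ("", s) == 0);           /* first argument "" */
  assert (strcspn (s, "") == strlen (s));  /* second argument "" */
}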
9134 /* Fold the next_arg or va_start call EXP. Returns true if an error was
9135 produced, false otherwise. This is done so that we don't output the same
9136 error or warning more than once. */
9138 bool
9139 fold_builtin_next_arg (tree exp, bool va_start_p)
9141 tree fntype = TREE_TYPE (current_function_decl);
9142 int nargs = call_expr_nargs (exp);
9143 tree arg;
9144 /* There is a good chance the current input_location points inside the
9145 definition of the va_start macro (perhaps on the token for the
9146 builtin) in a system header, so warnings would not be emitted there.
9147 Use the location in real source code instead. */
9148 source_location current_location =
9149 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9150 NULL);
9152 if (!stdarg_p (fntype))
9154 error ("%<va_start%> used in function with fixed args");
9155 return true;
9158 if (va_start_p)
9160 if (va_start_p && (nargs != 2))
9162 error ("wrong number of arguments to function %<va_start%>");
9163 return true;
9165 arg = CALL_EXPR_ARG (exp, 1);
9167 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9168 once we have checked the arguments and, if needed, issued a warning. */
9169 else
9171 if (nargs == 0)
9173 /* Evidently an out of date version of <stdarg.h>; can't validate
9174 va_start's second argument, but can still work as intended. */
9175 warning_at (current_location,
9176 OPT_Wvarargs,
9177 "%<__builtin_next_arg%> called without an argument");
9178 return true;
9180 else if (nargs > 1)
9182 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9183 return true;
9185 arg = CALL_EXPR_ARG (exp, 0);
9188 if (TREE_CODE (arg) == SSA_NAME)
9189 arg = SSA_NAME_VAR (arg);
9191 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9192 or __builtin_next_arg (0) the first time we see it, after checking
9193 the arguments and if needed issuing a warning. */
9194 if (!integer_zerop (arg))
9196 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9198 /* Strip off all nops for the sake of the comparison. This
9199 is not quite the same as STRIP_NOPS. It does more.
9200 We must also strip off INDIRECT_REF for C++ reference
9201 parameters. */
9202 while (CONVERT_EXPR_P (arg)
9203 || TREE_CODE (arg) == INDIRECT_REF)
9204 arg = TREE_OPERAND (arg, 0);
9205 if (arg != last_parm)
9207 /* FIXME: Sometimes with the tree optimizers we can end up with
9208 an argument that is not the last argument even though the user
9209 used the last argument. We just warn and set the arg to be the
9210 last argument so that we will not get wrong-code because of
9211 it. */
9212 warning_at (current_location,
9213 OPT_Wvarargs,
9214 "second parameter of %<va_start%> not last named argument");
9217 /* Undefined by C99 7.15.1.4p4 (va_start):
9218 "If the parameter parmN is declared with the register storage
9219 class, with a function or array type, or with a type that is
9220 not compatible with the type that results after application of
9221 the default argument promotions, the behavior is undefined." */
9223 else if (DECL_REGISTER (arg))
9225 warning_at (current_location,
9226 OPT_Wvarargs,
9227 "undefined behaviour when second parameter of "
9228 "%<va_start%> is declared with %<register%> storage");
9231 /* We want to verify the second parameter just once before the tree
9232 optimizers are run and then avoid keeping it in the tree,
9233 as otherwise we could warn even for correct code like:
9234 void foo (int i, ...)
9235 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9236 if (va_start_p)
9237 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9238 else
9239 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9241 return false;
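/* Editorial sketch (not part of GCC): the kind of user code the checks
   above validate.  The second operand of va_start must name the last
   fixed parameter; anything else draws the -Wvarargs diagnostics.  */
#include <stdarg.h>

static int
example_sum (int count, ...)
{
  va_list ap;
  int total = 0;

  va_start (ap, count);   /* correct: COUNT is the last named argument */
  for (int i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}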
9245 /* Expand a call EXP to __builtin_object_size. */
9247 static rtx
9248 expand_builtin_object_size (tree exp)
9250 tree ost;
9251 int object_size_type;
9252 tree fndecl = get_callee_fndecl (exp);
9254 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9256 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9257 exp, fndecl);
9258 expand_builtin_trap ();
9259 return const0_rtx;
9262 ost = CALL_EXPR_ARG (exp, 1);
9263 STRIP_NOPS (ost);
9265 if (TREE_CODE (ost) != INTEGER_CST
9266 || tree_int_cst_sgn (ost) < 0
9267 || compare_tree_int (ost, 3) > 0)
9269 error ("%Klast argument of %D is not integer constant between 0 and 3",
9270 exp, fndecl);
9271 expand_builtin_trap ();
9272 return const0_rtx;
9275 object_size_type = tree_to_shwi (ost);
9277 return object_size_type < 2 ? constm1_rtx : const0_rtx;
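/* Editorial sketch (not part of GCC): the fallback above matches the
   documented __builtin_object_size contract.  When nothing is known,
   the maximum estimates (types 0 and 1) report "unlimited" and the
   minimum estimates (types 2 and 3) report zero.  */
#include <assert.h>
#include <stddef.h>

static void
example_object_size_unknown (void *p)
{
  assert (__builtin_object_size (p, 0) == (size_t) -1);  /* maximum */
  assert (__builtin_object_size (p, 2) == 0);            /* minimum */
}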
9280 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9281 FCODE is the BUILT_IN_* to use.
9282 Return NULL_RTX if we failed; the caller should emit a normal call,
9283 otherwise try to get the result in TARGET, if convenient (and in
9284 mode MODE if that's convenient). */
9286 static rtx
9287 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9288 enum built_in_function fcode)
9290 tree dest, src, len, size;
9292 if (!validate_arglist (exp,
9293 POINTER_TYPE,
9294 fcode == BUILT_IN_MEMSET_CHK
9295 ? INTEGER_TYPE : POINTER_TYPE,
9296 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9297 return NULL_RTX;
9299 dest = CALL_EXPR_ARG (exp, 0);
9300 src = CALL_EXPR_ARG (exp, 1);
9301 len = CALL_EXPR_ARG (exp, 2);
9302 size = CALL_EXPR_ARG (exp, 3);
9304 if (! tree_fits_uhwi_p (size))
9305 return NULL_RTX;
9307 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9309 tree fn;
9311 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9313 warning_at (tree_nonartificial_location (exp),
9314 0, "%Kcall to %D will always overflow destination buffer",
9315 exp, get_callee_fndecl (exp));
9316 return NULL_RTX;
9319 fn = NULL_TREE;
9320 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9321 mem{cpy,pcpy,move,set} is available. */
9322 switch (fcode)
9324 case BUILT_IN_MEMCPY_CHK:
9325 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9326 break;
9327 case BUILT_IN_MEMPCPY_CHK:
9328 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9329 break;
9330 case BUILT_IN_MEMMOVE_CHK:
9331 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9332 break;
9333 case BUILT_IN_MEMSET_CHK:
9334 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9335 break;
9336 default:
9337 break;
9340 if (! fn)
9341 return NULL_RTX;
9343 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9344 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9345 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9346 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9348 else if (fcode == BUILT_IN_MEMSET_CHK)
9349 return NULL_RTX;
9350 else
9352 unsigned int dest_align = get_pointer_alignment (dest);
9354 /* If DEST is not a pointer type, call the normal function. */
9355 if (dest_align == 0)
9356 return NULL_RTX;
9358 /* If SRC and DEST are the same (and not volatile), do nothing. */
9359 if (operand_equal_p (src, dest, 0))
9361 tree expr;
9363 if (fcode != BUILT_IN_MEMPCPY_CHK)
9365 /* Evaluate and ignore LEN in case it has side-effects. */
9366 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9367 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9370 expr = fold_build_pointer_plus (dest, len);
9371 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9374 /* __memmove_chk special case. */
9375 if (fcode == BUILT_IN_MEMMOVE_CHK)
9377 unsigned int src_align = get_pointer_alignment (src);
9379 if (src_align == 0)
9380 return NULL_RTX;
9382 /* If SRC is categorized for a read-only section, we can use the
9383 normal __memcpy_chk. */
9384 if (readonly_data_expr (src))
9386 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9387 if (!fn)
9388 return NULL_RTX;
9389 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9390 dest, src, len, size);
9391 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9392 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9393 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9396 return NULL_RTX;
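/* Editorial sketch (not part of GCC): the user-visible contract behind
   the rewrite above.  __memcpy_chk behaves exactly like memcpy whenever
   the length fits in the object size GCC computed, which is why a
   fitting constant LEN lets the checked call drop to the plain one.  */
static void
example_memcpy_chk (char *dst, const char *src)
{
  /* Equivalent to memcpy (dst, src, 4) when the destination is known to
     hold at least 4 bytes; otherwise the call traps at run time.  */
  __builtin___memcpy_chk (dst, src, 4, __builtin_object_size (dst, 0));
}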
9400 /* Emit warning if a buffer overflow is detected at compile time. */
9402 static void
9403 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9405 int is_strlen = 0;
9406 tree len, size;
9407 location_t loc = tree_nonartificial_location (exp);
9409 switch (fcode)
9411 case BUILT_IN_STRCPY_CHK:
9412 case BUILT_IN_STPCPY_CHK:
9413 /* For __strcat_chk the warning will be emitted only if overflowing
9414 by at least strlen (dest) + 1 bytes. */
9415 case BUILT_IN_STRCAT_CHK:
9416 len = CALL_EXPR_ARG (exp, 1);
9417 size = CALL_EXPR_ARG (exp, 2);
9418 is_strlen = 1;
9419 break;
9420 case BUILT_IN_STRNCAT_CHK:
9421 case BUILT_IN_STRNCPY_CHK:
9422 case BUILT_IN_STPNCPY_CHK:
9423 len = CALL_EXPR_ARG (exp, 2);
9424 size = CALL_EXPR_ARG (exp, 3);
9425 break;
9426 case BUILT_IN_SNPRINTF_CHK:
9427 case BUILT_IN_VSNPRINTF_CHK:
9428 len = CALL_EXPR_ARG (exp, 1);
9429 size = CALL_EXPR_ARG (exp, 3);
9430 break;
9431 default:
9432 gcc_unreachable ();
9435 if (!len || !size)
9436 return;
9438 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9439 return;
9441 if (is_strlen)
9443 len = c_strlen (len, 1);
9444 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9445 return;
9447 else if (fcode == BUILT_IN_STRNCAT_CHK)
9449 tree src = CALL_EXPR_ARG (exp, 1);
9450 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9451 return;
9452 src = c_strlen (src, 1);
9453 if (! src || ! tree_fits_uhwi_p (src))
9455 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9456 exp, get_callee_fndecl (exp));
9457 return;
9459 else if (tree_int_cst_lt (src, size))
9460 return;
9462 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9463 return;
9465 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9466 exp, get_callee_fndecl (exp));
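/* Editorial sketch (not part of GCC): a call the warning above fires on.
   The source needs 6 bytes ("hello" plus NUL) but the destination object
   is only 4 bytes, so the overflow is certain at compile time.  */
static void
example_strcpy_chk_overflow (void)
{
  char buf[4];
  __builtin___strcpy_chk (buf, "hello", __builtin_object_size (buf, 0));
}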
9469 /* Emit warning if a buffer overflow is detected at compile time
9470 in __sprintf_chk/__vsprintf_chk calls. */
9472 static void
9473 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9475 tree size, len, fmt;
9476 const char *fmt_str;
9477 int nargs = call_expr_nargs (exp);
9479 /* Verify the required arguments in the original call. */
9481 if (nargs < 4)
9482 return;
9483 size = CALL_EXPR_ARG (exp, 2);
9484 fmt = CALL_EXPR_ARG (exp, 3);
9486 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9487 return;
9489 /* Check whether the format is a literal string constant. */
9490 fmt_str = c_getstr (fmt);
9491 if (fmt_str == NULL)
9492 return;
9494 if (!init_target_chars ())
9495 return;
9497 /* If the format doesn't contain % args or %%, we know its size. */
9498 if (strchr (fmt_str, target_percent) == 0)
9499 len = build_int_cstu (size_type_node, strlen (fmt_str));
9500 /* If the format is "%s" and the first ... argument is a string literal,
9501 we know its size too. */
9502 else if (fcode == BUILT_IN_SPRINTF_CHK
9503 && strcmp (fmt_str, target_percent_s) == 0)
9505 tree arg;
9507 if (nargs < 5)
9508 return;
9509 arg = CALL_EXPR_ARG (exp, 4);
9510 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9511 return;
9513 len = c_strlen (arg, 1);
9514 if (!len || ! tree_fits_uhwi_p (len))
9515 return;
9517 else
9518 return;
9520 if (! tree_int_cst_lt (len, size))
9521 warning_at (tree_nonartificial_location (exp),
9522 0, "%Kcall to %D will always overflow destination buffer",
9523 exp, get_callee_fndecl (exp));
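/* Editorial sketch (not part of GCC): the two cases the checker above
   can measure.  A format with no '%' has a known length, and so does a
   plain "%s" with a literal argument; either way LEN >= SIZE triggers
   the warning.  */
static void
example_sprintf_chk_overflow (void)
{
  char buf[4];
  /* 5 bytes ("abcd" + NUL) into a 4-byte object: certain overflow.  */
  __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0), "abcd");
}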
9526 /* Emit warning if a free is called with address of a variable. */
9528 static void
9529 maybe_emit_free_warning (tree exp)
9531 tree arg = CALL_EXPR_ARG (exp, 0);
9533 STRIP_NOPS (arg);
9534 if (TREE_CODE (arg) != ADDR_EXPR)
9535 return;
9537 arg = get_base_address (TREE_OPERAND (arg, 0));
9538 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9539 return;
9541 if (SSA_VAR_P (arg))
9542 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9543 "%Kattempt to free a non-heap object %qD", exp, arg);
9544 else
9545 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9546 "%Kattempt to free a non-heap object", exp);
9549 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9550 if possible. */
9552 static tree
9553 fold_builtin_object_size (tree ptr, tree ost)
9555 unsigned HOST_WIDE_INT bytes;
9556 int object_size_type;
9558 if (!validate_arg (ptr, POINTER_TYPE)
9559 || !validate_arg (ost, INTEGER_TYPE))
9560 return NULL_TREE;
9562 STRIP_NOPS (ost);
9564 if (TREE_CODE (ost) != INTEGER_CST
9565 || tree_int_cst_sgn (ost) < 0
9566 || compare_tree_int (ost, 3) > 0)
9567 return NULL_TREE;
9569 object_size_type = tree_to_shwi (ost);
9571 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9572 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9573 and (size_t) 0 for types 2 and 3. */
9574 if (TREE_SIDE_EFFECTS (ptr))
9575 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9577 if (TREE_CODE (ptr) == ADDR_EXPR)
9579 bytes = compute_builtin_object_size (ptr, object_size_type);
9580 if (wi::fits_to_tree_p (bytes, size_type_node))
9581 return build_int_cstu (size_type_node, bytes);
9583 else if (TREE_CODE (ptr) == SSA_NAME)
9585 /* If the object size is not known yet, delay folding until
9586 later. Maybe subsequent passes will help determine
9587 it. */
9588 bytes = compute_builtin_object_size (ptr, object_size_type);
9589 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9590 && wi::fits_to_tree_p (bytes, size_type_node))
9591 return build_int_cstu (size_type_node, bytes);
9594 return NULL_TREE;
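/* Editorial sketch (not part of GCC): the side-effect rule handled
   above.  __builtin_object_size never evaluates its first argument, so
   a call with side effects folds straight to the "unknown" constant and
   the side effect does not happen.  */
#include <stddef.h>

static char *example_bump (char **p) { return (*p)++; }

static size_t
example_object_size_side_effects (char *p)
{
  /* example_bump is not called; the result is (size_t) -1 for type 0.  */
  return __builtin_object_size (example_bump (&p), 0);
}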
9597 /* Builtins with folding operations that operate on "..." arguments
9598 need special handling; we need to store the arguments in a convenient
9599 data structure before attempting any folding. Fortunately there are
9600 only a few builtins that fall into this category. FNDECL is the
9601 function, ARGS is the argument array, and NARGS its length. */
9603 static tree
9604 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9606 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9607 tree ret = NULL_TREE;
9609 switch (fcode)
9611 case BUILT_IN_FPCLASSIFY:
9612 ret = fold_builtin_fpclassify (loc, args, nargs);
9613 break;
9615 default:
9616 break;
9618 if (ret)
9620 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9621 SET_EXPR_LOCATION (ret, loc);
9622 TREE_NO_WARNING (ret) = 1;
9623 return ret;
9625 return NULL_TREE;
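/* Editorial sketch (not part of GCC): the one varargs builtin folded
   above.  __builtin_fpclassify takes the five classification values
   first and the operand last; with a constant operand the call folds
   to one of those values.  */
#include <math.h>

static int
example_fpclassify (double x)
{
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                               FP_SUBNORMAL, FP_ZERO, x);
}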
9628 /* Initialize format string characters in the target charset. */
9630 bool
9631 init_target_chars (void)
9633 static bool init;
9634 if (!init)
9636 target_newline = lang_hooks.to_target_charset ('\n');
9637 target_percent = lang_hooks.to_target_charset ('%');
9638 target_c = lang_hooks.to_target_charset ('c');
9639 target_s = lang_hooks.to_target_charset ('s');
9640 if (target_newline == 0 || target_percent == 0 || target_c == 0
9641 || target_s == 0)
9642 return false;
9644 target_percent_c[0] = target_percent;
9645 target_percent_c[1] = target_c;
9646 target_percent_c[2] = '\0';
9648 target_percent_s[0] = target_percent;
9649 target_percent_s[1] = target_s;
9650 target_percent_s[2] = '\0';
9652 target_percent_s_newline[0] = target_percent;
9653 target_percent_s_newline[1] = target_s;
9654 target_percent_s_newline[2] = target_newline;
9655 target_percent_s_newline[3] = '\0';
9657 init = true;
9659 return true;
9662 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9663 and no overflow/underflow occurred. INEXACT is true if M was not
9664 exactly calculated. TYPE is the tree type for the result. This
9665 function assumes that the caller cleared the MPFR flags and then
9666 calculated M, so that on entry the flags describe only that
9667 calculation. Return NULL_TREE if any checks fail. */
9669 static tree
9670 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9672 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9673 overflow/underflow occurred. If -frounding-math, proceed iff the
9674 computation of M was exact. */
9675 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9676 && (!flag_rounding_math || !inexact))
9678 REAL_VALUE_TYPE rr;
9680 real_from_mpfr (&rr, m, type, GMP_RNDN);
9681 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
9682 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9683 but the mpfr_t is not, then we underflowed in the
9684 conversion. */
9685 if (real_isfinite (&rr)
9686 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9688 REAL_VALUE_TYPE rmode;
9690 real_convert (&rmode, TYPE_MODE (type), &rr);
9691 /* Proceed iff the specified mode can hold the value. */
9692 if (real_identical (&rmode, &rr))
9693 return build_real (type, rmode);
9696 return NULL_TREE;
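/* Editorial sketch (not part of GCC): the MPFR flag protocol the
   comment above assumes, shown on plain mpfr_t values.  Clear the
   sticky flags, perform exactly one operation, then test; the flags
   then describe only that operation.  Requires linking with -lmpfr.  */
#include <mpfr.h>

static int
example_mpfr_protocol (mpfr_ptr result, mpfr_srcptr x)
{
  int inexact;

  mpfr_clear_flags ();                       /* step 1: reset flags */
  inexact = mpfr_exp (result, x, GMP_RNDN);  /* step 2: one operation */
  /* Step 3: the checks below see only the mpfr_exp call.  */
  return mpfr_number_p (result)
         && !mpfr_overflow_p () && !mpfr_underflow_p () && !inexact;
}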
9699 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9700 number and no overflow/underflow occurred. INEXACT is true if M
9701 was not exactly calculated. TYPE is the tree type for the result.
9702 This function assumes that the caller cleared the MPFR flags and
9703 then calculated M, so that on entry the flags describe only that
9704 calculation. Return NULL_TREE if any checks fail; if
9705 FORCE_CONVERT is true, the checks are bypassed. */
9707 static tree
9708 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9710 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9711 overflow/underflow occurred. If -frounding-math, proceed iff the
9712 computation of M was exact. */
9713 if (force_convert
9714 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9715 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9716 && (!flag_rounding_math || !inexact)))
9718 REAL_VALUE_TYPE re, im;
9720 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9721 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9722 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
9723 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9724 but the mpfr_t is not, then we underflowed in the
9725 conversion. */
9726 if (force_convert
9727 || (real_isfinite (&re) && real_isfinite (&im)
9728 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9729 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9731 REAL_VALUE_TYPE re_mode, im_mode;
9733 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9734 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9735 /* Proceed iff the specified mode can hold the value. */
9736 if (force_convert
9737 || (real_identical (&re_mode, &re)
9738 && real_identical (&im_mode, &im)))
9739 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9740 build_real (TREE_TYPE (type), im_mode));
9743 return NULL_TREE;
9746 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to store
9747 the quotient through ARG_QUO and return the remainder. The type is taken
9748 from the type of ARG0 and is used for setting the precision of the
9749 calculation and results. */
9751 static tree
9752 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9754 tree const type = TREE_TYPE (arg0);
9755 tree result = NULL_TREE;
9757 STRIP_NOPS (arg0);
9758 STRIP_NOPS (arg1);
9760 /* To proceed, MPFR must exactly represent the target floating point
9761 format, which only happens when the target base equals two. */
9762 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9763 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9764 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9766 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9767 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9769 if (real_isfinite (ra0) && real_isfinite (ra1))
9771 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9772 const int prec = fmt->p;
9773 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9774 tree result_rem;
9775 long integer_quo;
9776 mpfr_t m0, m1;
9778 mpfr_inits2 (prec, m0, m1, NULL);
9779 mpfr_from_real (m0, ra0, GMP_RNDN);
9780 mpfr_from_real (m1, ra1, GMP_RNDN);
9781 mpfr_clear_flags ();
9782 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9783 /* Remquo is independent of the rounding mode, so pass
9784 inexact=0 to do_mpfr_ckconv(). */
9785 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9786 mpfr_clears (m0, m1, NULL);
9787 if (result_rem)
9789 /* MPFR calculates quo in the host's long, so it may
9790 return more bits in quo than the target int can hold
9791 if sizeof(host long) > sizeof(target int). This can
9792 happen even for native compilers in LP64 mode. In
9793 these cases, reduce the quo value modulo the largest
9794 power of two that the target int can hold while leaving
9795 one bit for the sign. */
9796 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9797 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9799 /* Dereference the quo pointer argument. */
9800 arg_quo = build_fold_indirect_ref (arg_quo);
9801 /* Proceed iff a valid pointer type was passed in. */
9802 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9804 /* Set the value. */
9805 tree result_quo
9806 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9807 build_int_cst (TREE_TYPE (arg_quo),
9808 integer_quo));
9809 TREE_SIDE_EFFECTS (result_quo) = 1;
9810 /* Combine the quo assignment with the rem. */
9811 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9812 result_quo, result_rem));
9817 return result;
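/* Editorial sketch (not part of GCC): the host-long-to-target-int
   reduction performed above, isolated.  TARGET_INT_BITS stands for
   INT_TYPE_SIZE; with a 64-bit host long and a 32-bit target int the
   quotient is reduced so it fits with the sign bit to spare.  */
#include <limits.h>

static long
example_reduce_quo (long integer_quo, int target_int_bits)
{
  if ((int) (sizeof (integer_quo) * CHAR_BIT) > target_int_bits)
    integer_quo %= (long) (1UL << (target_int_bits - 1));
  return integer_quo;
}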
9820 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9821 resulting value as a tree with type TYPE. The mpfr precision is
9822 set to the precision of TYPE. We assume that this mpfr function
9823 returns zero if the result could be calculated exactly within the
9824 requested precision. In addition, the integer pointer represented
9825 by ARG_SG will be dereferenced and set to the appropriate signgam
9826 (-1,1) value. */
9828 static tree
9829 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9831 tree result = NULL_TREE;
9833 STRIP_NOPS (arg);
9835 /* To proceed, MPFR must exactly represent the target floating point
9836 format, which only happens when the target base equals two. Also
9837 verify ARG is a constant and that ARG_SG is an int pointer. */
9838 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9839 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9840 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9841 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9843 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9845 /* In addition to NaN and Inf, the argument cannot be zero or a
9846 negative integer. */
9847 if (real_isfinite (ra)
9848 && ra->cl != rvc_zero
9849 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9851 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9852 const int prec = fmt->p;
9853 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9854 int inexact, sg;
9855 mpfr_t m;
9856 tree result_lg;
9858 mpfr_init2 (m, prec);
9859 mpfr_from_real (m, ra, GMP_RNDN);
9860 mpfr_clear_flags ();
9861 inexact = mpfr_lgamma (m, &sg, m, rnd);
9862 result_lg = do_mpfr_ckconv (m, type, inexact);
9863 mpfr_clear (m);
9864 if (result_lg)
9866 tree result_sg;
9868 /* Dereference the arg_sg pointer argument. */
9869 arg_sg = build_fold_indirect_ref (arg_sg);
9870 /* Assign the signgam value into *arg_sg. */
9871 result_sg = fold_build2 (MODIFY_EXPR,
9872 TREE_TYPE (arg_sg), arg_sg,
9873 build_int_cst (TREE_TYPE (arg_sg), sg));
9874 TREE_SIDE_EFFECTS (result_sg) = 1;
9875 /* Combine the signgam assignment with the lgamma result. */
9876 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9877 result_sg, result_lg));
9882 return result;
9885 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9886 mpc function FUNC on it and return the resulting value as a tree
9887 with type TYPE. The mpfr precision is set to the precision of
9888 TYPE. We assume that function FUNC returns zero if the result
9889 could be calculated exactly within the requested precision. If
9890 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9891 in the arguments and/or results. */
9893 tree
9894 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9895 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9897 tree result = NULL_TREE;
9899 STRIP_NOPS (arg0);
9900 STRIP_NOPS (arg1);
9902 /* To proceed, MPFR must exactly represent the target floating point
9903 format, which only happens when the target base equals two. */
9904 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9905 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9906 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9907 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9908 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9910 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9911 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9912 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9913 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9915 if (do_nonfinite
9916 || (real_isfinite (re0) && real_isfinite (im0)
9917 && real_isfinite (re1) && real_isfinite (im1)))
9919 const struct real_format *const fmt =
9920 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9921 const int prec = fmt->p;
9922 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9923 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9924 int inexact;
9925 mpc_t m0, m1;
9927 mpc_init2 (m0, prec);
9928 mpc_init2 (m1, prec);
9929 mpfr_from_real (mpc_realref (m0), re0, rnd);
9930 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9931 mpfr_from_real (mpc_realref (m1), re1, rnd);
9932 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9933 mpfr_clear_flags ();
9934 inexact = func (m0, m0, m1, crnd);
9935 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9936 mpc_clear (m0);
9937 mpc_clear (m1);
9941 return result;
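/* Editorial sketch (not part of GCC): the MPC call pattern above on
   plain mpc_t values, with mpc_pow as a representative two-operand
   FUNC.  A nonzero return means the result was inexact.  Requires
   linking with -lmpc.  */
#include <mpc.h>

static int
example_mpc_arg2 (mpc_ptr out, mpc_srcptr a, mpc_srcptr b)
{
  mpfr_clear_flags ();   /* as in do_mpc_arg2 above */
  return mpc_pow (out, a, b, MPC_RNDNN);
}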
9944 /* A wrapper function for builtin folding that prevents warnings for
9945 "statement without effect" and the like, caused by removing the
9946 call node before the warning is generated. */
9948 tree
9949 fold_call_stmt (gcall *stmt, bool ignore)
9951 tree ret = NULL_TREE;
9952 tree fndecl = gimple_call_fndecl (stmt);
9953 location_t loc = gimple_location (stmt);
9954 if (fndecl
9955 && TREE_CODE (fndecl) == FUNCTION_DECL
9956 && DECL_BUILT_IN (fndecl)
9957 && !gimple_call_va_arg_pack_p (stmt))
9959 int nargs = gimple_call_num_args (stmt);
9960 tree *args = (nargs > 0
9961 ? gimple_call_arg_ptr (stmt, 0)
9962 : &error_mark_node);
9964 if (avoid_folding_inline_builtin (fndecl))
9965 return NULL_TREE;
9966 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9968 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9970 else
9972 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9973 if (ret)
9975 /* Propagate location information from original call to
9976 expansion of builtin. Otherwise things like
9977 maybe_emit_chk_warning, which operate on the expansion
9978 of a builtin, would use the wrong location information. */
9979 if (gimple_has_location (stmt))
9981 tree realret = ret;
9982 if (TREE_CODE (ret) == NOP_EXPR)
9983 realret = TREE_OPERAND (ret, 0);
9984 if (CAN_HAVE_LOCATION_P (realret)
9985 && !EXPR_HAS_LOCATION (realret))
9986 SET_EXPR_LOCATION (realret, loc);
9987 return realret;
9989 return ret;
9993 return NULL_TREE;
9996 /* Look up the function in builtin_decl that corresponds to DECL
9997 and set ASMSPEC as its user assembler name. DECL must be a
9998 function decl that declares a builtin. */
10000 void
10001 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10003 tree builtin;
10004 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10005 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10006 && asmspec != 0);
10008 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10009 set_user_assembler_name (builtin, asmspec);
10010 switch (DECL_FUNCTION_CODE (decl))
10012 case BUILT_IN_MEMCPY:
10013 init_block_move_fn (asmspec);
10014 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
10015 break;
10016 case BUILT_IN_MEMSET:
10017 init_block_clear_fn (asmspec);
10018 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
10019 break;
10020 case BUILT_IN_MEMMOVE:
10021 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
10022 break;
10023 case BUILT_IN_MEMCMP:
10024 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
10025 break;
10026 case BUILT_IN_ABORT:
10027 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
10028 break;
10029 case BUILT_IN_FFS:
10030 if (INT_TYPE_SIZE < BITS_PER_WORD)
10032 set_user_assembler_libfunc ("ffs", asmspec);
10033 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
10034 MODE_INT, 0), "ffs");
10036 break;
10037 default:
10038 break;
10042 /* Return true if DECL is a builtin that expands to a constant or similarly
10043 simple code. */
10044 bool
10045 is_simple_builtin (tree decl)
10047 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10048 switch (DECL_FUNCTION_CODE (decl))
10050 /* Builtins that expand to constants. */
10051 case BUILT_IN_CONSTANT_P:
10052 case BUILT_IN_EXPECT:
10053 case BUILT_IN_OBJECT_SIZE:
10054 case BUILT_IN_UNREACHABLE:
10055 /* Simple register moves or loads from stack. */
10056 case BUILT_IN_ASSUME_ALIGNED:
10057 case BUILT_IN_RETURN_ADDRESS:
10058 case BUILT_IN_EXTRACT_RETURN_ADDR:
10059 case BUILT_IN_FROB_RETURN_ADDR:
10060 case BUILT_IN_RETURN:
10061 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10062 case BUILT_IN_FRAME_ADDRESS:
10063 case BUILT_IN_VA_END:
10064 case BUILT_IN_STACK_SAVE:
10065 case BUILT_IN_STACK_RESTORE:
10066 /* Exception state returns or moves registers around. */
10067 case BUILT_IN_EH_FILTER:
10068 case BUILT_IN_EH_POINTER:
10069 case BUILT_IN_EH_COPY_VALUES:
10070 return true;
10072 default:
10073 return false;
10076 return false;
10079 /* Return true if DECL is a builtin that is not expensive, i.e., one that
10080 is most probably expanded inline into reasonably simple code. This is a
10081 superset of is_simple_builtin. */
10082 bool
10083 is_inexpensive_builtin (tree decl)
10085 if (!decl)
10086 return false;
10087 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10088 return true;
10089 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10090 switch (DECL_FUNCTION_CODE (decl))
10092 case BUILT_IN_ABS:
10093 case BUILT_IN_ALLOCA:
10094 case BUILT_IN_ALLOCA_WITH_ALIGN:
10095 case BUILT_IN_BSWAP16:
10096 case BUILT_IN_BSWAP32:
10097 case BUILT_IN_BSWAP64:
10098 case BUILT_IN_CLZ:
10099 case BUILT_IN_CLZIMAX:
10100 case BUILT_IN_CLZL:
10101 case BUILT_IN_CLZLL:
10102 case BUILT_IN_CTZ:
10103 case BUILT_IN_CTZIMAX:
10104 case BUILT_IN_CTZL:
10105 case BUILT_IN_CTZLL:
10106 case BUILT_IN_FFS:
10107 case BUILT_IN_FFSIMAX:
10108 case BUILT_IN_FFSL:
10109 case BUILT_IN_FFSLL:
10110 case BUILT_IN_IMAXABS:
10111 case BUILT_IN_FINITE:
10112 case BUILT_IN_FINITEF:
10113 case BUILT_IN_FINITEL:
10114 case BUILT_IN_FINITED32:
10115 case BUILT_IN_FINITED64:
10116 case BUILT_IN_FINITED128:
10117 case BUILT_IN_FPCLASSIFY:
10118 case BUILT_IN_ISFINITE:
10119 case BUILT_IN_ISINF_SIGN:
10120 case BUILT_IN_ISINF:
10121 case BUILT_IN_ISINFF:
10122 case BUILT_IN_ISINFL:
10123 case BUILT_IN_ISINFD32:
10124 case BUILT_IN_ISINFD64:
10125 case BUILT_IN_ISINFD128:
10126 case BUILT_IN_ISNAN:
10127 case BUILT_IN_ISNANF:
10128 case BUILT_IN_ISNANL:
10129 case BUILT_IN_ISNAND32:
10130 case BUILT_IN_ISNAND64:
10131 case BUILT_IN_ISNAND128:
10132 case BUILT_IN_ISNORMAL:
10133 case BUILT_IN_ISGREATER:
10134 case BUILT_IN_ISGREATEREQUAL:
10135 case BUILT_IN_ISLESS:
10136 case BUILT_IN_ISLESSEQUAL:
10137 case BUILT_IN_ISLESSGREATER:
10138 case BUILT_IN_ISUNORDERED:
10139 case BUILT_IN_VA_ARG_PACK:
10140 case BUILT_IN_VA_ARG_PACK_LEN:
10141 case BUILT_IN_VA_COPY:
10142 case BUILT_IN_TRAP:
10143 case BUILT_IN_SAVEREGS:
10144 case BUILT_IN_POPCOUNTL:
10145 case BUILT_IN_POPCOUNTLL:
10146 case BUILT_IN_POPCOUNTIMAX:
10147 case BUILT_IN_POPCOUNT:
10148 case BUILT_IN_PARITYL:
10149 case BUILT_IN_PARITYLL:
10150 case BUILT_IN_PARITYIMAX:
10151 case BUILT_IN_PARITY:
10152 case BUILT_IN_LABS:
10153 case BUILT_IN_LLABS:
10154 case BUILT_IN_PREFETCH:
10155 case BUILT_IN_ACC_ON_DEVICE:
10156 return true;
10158 default:
10159 return is_simple_builtin (decl);
10162 return false;