Extend fold_const_call to combined_fn
gcc/builtins.c
/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif
/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or,
   when Cilk Plus is enabled, is one of the Cilk runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}
/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
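
/* Worked example (illustration only): if get_object_alignment_1 reports
   align == 64 and bitpos == 16 (both in bits), the address satisfies
   ptr % 8 == 2 in bytes.  The largest power of two guaranteed to divide
   the address is then bitpos & -bitpos == 16 bits, so only 2-byte
   alignment can be returned.  */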
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
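
/* Worked example (illustration only): for the string constant "hello"
   with known offset 2, max == 5 and the offset is in bounds, so the
   result is ssize_int (strlen ("llo")) == 3.  For "foo\0bar" with a
   non-constant offset, the scan above finds the embedded zero byte and
   returns NULL_TREE, since the distance to the terminating null depends
   on which side of that zero the offset lands.  */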
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
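
/* Byte-order sketch (illustration only): reading "abcd" in a 32-bit
   integer mode puts 'a' (0x61) in the lowest-addressed byte, so the
   constant is 0x64636261 on a little-endian target and 0x61626364 on a
   big-endian one.  Once a NUL is seen, CH stays zero and the remaining
   bytes of the mode are zero-filled.  */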
/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;
/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
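
/* Buffer layout used by this function and its companions below
   (illustrative summary), in Pmode-sized words starting at BUF_ADDR:

     word 0   frame pointer (targetm.builtin_setjmp_frame_value ())
     word 1   address of RECEIVER_LABEL
     word 2+  machine-dependent stack save area in sa_mode

   expand_builtin_longjmp and expand_builtin_update_setjmp_buf read and
   rewrite these same slots, so the three must stay in sync.  */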
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
/* Return true if ITER, an argument iterator over a CALL_EXPR, has more
   arguments to visit.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
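
/* Usage sketch (illustration only): expanders call this with the expected
   argument codes followed by a terminator, e.g.

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while the trailing 0 in

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer followed by any further arguments.  */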
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
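
/* Usage sketch (illustration only):

     __builtin_prefetch (&a[i + 8], 0, 3);

   requests a read prefetch (second argument 0) with maximal temporal
   locality (third argument 3); as enforced above, both of those
   arguments must be compile-time integer constants.  */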
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its inner expression; for expr->a.b only <variable>.a.b is
     recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}
/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}
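
/* Rounding sketch (illustration only): if size is 20 when an 8-byte
   aligned register slot comes next, 20 % 8 != 0, so size is bumped to
   CEIL (20, 8) * 8 == 24 before the slot's GET_MODE_SIZE is added.  */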
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
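
/* Usage sketch (illustration only, per the GCC "constructing calls"
   extension; TARGET_FN and the size bound are hypothetical):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
     __builtin_return (ret);

   forwards the current function's arguments to TARGET_FN, with 64 a
   caller-chosen upper bound on the argument block size.  */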
1663 /* Perform an untyped return. */
1665 static void
1666 expand_builtin_return (rtx result)
1668 int size, align, regno;
1669 machine_mode mode;
1670 rtx reg;
1671 rtx_insn *call_fusage = 0;
1673 result = convert_memory_address (Pmode, result);
1675 apply_result_size ();
1676 result = gen_rtx_MEM (BLKmode, result);
1678 if (targetm.have_untyped_return ())
1680 rtx vector = result_vector (0, result);
1681 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1682 emit_barrier ();
1683 return;
1686 /* Restore the return value and note that each value is used. */
1687 size = 0;
1688 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1689 if ((mode = apply_result_mode[regno]) != VOIDmode)
1691 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1692 if (size % align != 0)
1693 size = CEIL (size, align) * align;
1694 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1695 emit_move_insn (reg, adjust_address (result, mode, size));
1697 push_to_sequence (call_fusage);
1698 emit_use (reg);
1699 call_fusage = get_insns ();
1700 end_sequence ();
1701 size += GET_MODE_SIZE (mode);
1704 /* Put the USE insns before the return. */
1705 emit_insn (call_fusage);
1707   /* Return whatever values were restored by jumping directly to the end
1708      of the function.  */
1709 expand_naked_return ();
1712 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1714 static enum type_class
1715 type_to_class (tree type)
1717 switch (TREE_CODE (type))
1719 case VOID_TYPE: return void_type_class;
1720 case INTEGER_TYPE: return integer_type_class;
1721 case ENUMERAL_TYPE: return enumeral_type_class;
1722 case BOOLEAN_TYPE: return boolean_type_class;
1723 case POINTER_TYPE: return pointer_type_class;
1724 case REFERENCE_TYPE: return reference_type_class;
1725 case OFFSET_TYPE: return offset_type_class;
1726 case REAL_TYPE: return real_type_class;
1727 case COMPLEX_TYPE: return complex_type_class;
1728 case FUNCTION_TYPE: return function_type_class;
1729 case METHOD_TYPE: return method_type_class;
1730 case RECORD_TYPE: return record_type_class;
1731 case UNION_TYPE:
1732 case QUAL_UNION_TYPE: return union_type_class;
1733 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1734 ? string_type_class : array_type_class);
1735 case LANG_TYPE: return lang_type_class;
1736 default: return no_type_class;
1740 /* Expand a call EXP to __builtin_classify_type. */
1742 static rtx
1743 expand_builtin_classify_type (tree exp)
1745 if (call_expr_nargs (exp))
1746 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1747 return GEN_INT (no_type_class);
1750 /* This helper macro, meant to be used in mathfn_built_in_2 below,
1751 determines which among a set of three builtin math functions is
1752 appropriate for a given type mode. The `F' and `L' cases are
1753 automatically generated from the `double' case. */
1754 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1755 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1756 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1757 fcodel = BUILT_IN_MATHFN##L ; break;
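/* For illustration, a single use such as CASE_MATHFN (BUILT_IN_SIN)
   expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   so one macro line in the switch below covers the double, float and
   long double variants at once.  */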
1758 /* Similar to above, but appends _R after any F/L suffix. */
1759 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1760 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1761 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1762 fcodel = BUILT_IN_MATHFN##L_R ; break;
1764 /* Return a function equivalent to FN but operating on floating-point
1765 values of type TYPE, or END_BUILTINS if no such function exists.
1766 This is purely an operation on built-in function codes; it does not
1767 guarantee that the target actually has an implementation of the
1768 function. */
1770 static built_in_function
1771 mathfn_built_in_2 (tree type, built_in_function fn)
1773 built_in_function fcode, fcodef, fcodel;
1775 switch (fn)
1777 CASE_MATHFN (BUILT_IN_ACOS)
1778 CASE_MATHFN (BUILT_IN_ACOSH)
1779 CASE_MATHFN (BUILT_IN_ASIN)
1780 CASE_MATHFN (BUILT_IN_ASINH)
1781 CASE_MATHFN (BUILT_IN_ATAN)
1782 CASE_MATHFN (BUILT_IN_ATAN2)
1783 CASE_MATHFN (BUILT_IN_ATANH)
1784 CASE_MATHFN (BUILT_IN_CBRT)
1785 CASE_MATHFN (BUILT_IN_CEIL)
1786 CASE_MATHFN (BUILT_IN_CEXPI)
1787 CASE_MATHFN (BUILT_IN_COPYSIGN)
1788 CASE_MATHFN (BUILT_IN_COS)
1789 CASE_MATHFN (BUILT_IN_COSH)
1790 CASE_MATHFN (BUILT_IN_DREM)
1791 CASE_MATHFN (BUILT_IN_ERF)
1792 CASE_MATHFN (BUILT_IN_ERFC)
1793 CASE_MATHFN (BUILT_IN_EXP)
1794 CASE_MATHFN (BUILT_IN_EXP10)
1795 CASE_MATHFN (BUILT_IN_EXP2)
1796 CASE_MATHFN (BUILT_IN_EXPM1)
1797 CASE_MATHFN (BUILT_IN_FABS)
1798 CASE_MATHFN (BUILT_IN_FDIM)
1799 CASE_MATHFN (BUILT_IN_FLOOR)
1800 CASE_MATHFN (BUILT_IN_FMA)
1801 CASE_MATHFN (BUILT_IN_FMAX)
1802 CASE_MATHFN (BUILT_IN_FMIN)
1803 CASE_MATHFN (BUILT_IN_FMOD)
1804 CASE_MATHFN (BUILT_IN_FREXP)
1805 CASE_MATHFN (BUILT_IN_GAMMA)
1806 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1807 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1808 CASE_MATHFN (BUILT_IN_HYPOT)
1809 CASE_MATHFN (BUILT_IN_ILOGB)
1810 CASE_MATHFN (BUILT_IN_ICEIL)
1811 CASE_MATHFN (BUILT_IN_IFLOOR)
1812 CASE_MATHFN (BUILT_IN_INF)
1813 CASE_MATHFN (BUILT_IN_IRINT)
1814 CASE_MATHFN (BUILT_IN_IROUND)
1815 CASE_MATHFN (BUILT_IN_ISINF)
1816 CASE_MATHFN (BUILT_IN_J0)
1817 CASE_MATHFN (BUILT_IN_J1)
1818 CASE_MATHFN (BUILT_IN_JN)
1819 CASE_MATHFN (BUILT_IN_LCEIL)
1820 CASE_MATHFN (BUILT_IN_LDEXP)
1821 CASE_MATHFN (BUILT_IN_LFLOOR)
1822 CASE_MATHFN (BUILT_IN_LGAMMA)
1823 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1824 CASE_MATHFN (BUILT_IN_LLCEIL)
1825 CASE_MATHFN (BUILT_IN_LLFLOOR)
1826 CASE_MATHFN (BUILT_IN_LLRINT)
1827 CASE_MATHFN (BUILT_IN_LLROUND)
1828 CASE_MATHFN (BUILT_IN_LOG)
1829 CASE_MATHFN (BUILT_IN_LOG10)
1830 CASE_MATHFN (BUILT_IN_LOG1P)
1831 CASE_MATHFN (BUILT_IN_LOG2)
1832 CASE_MATHFN (BUILT_IN_LOGB)
1833 CASE_MATHFN (BUILT_IN_LRINT)
1834 CASE_MATHFN (BUILT_IN_LROUND)
1835 CASE_MATHFN (BUILT_IN_MODF)
1836 CASE_MATHFN (BUILT_IN_NAN)
1837 CASE_MATHFN (BUILT_IN_NANS)
1838 CASE_MATHFN (BUILT_IN_NEARBYINT)
1839 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1840 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1841 CASE_MATHFN (BUILT_IN_POW)
1842 CASE_MATHFN (BUILT_IN_POWI)
1843 CASE_MATHFN (BUILT_IN_POW10)
1844 CASE_MATHFN (BUILT_IN_REMAINDER)
1845 CASE_MATHFN (BUILT_IN_REMQUO)
1846 CASE_MATHFN (BUILT_IN_RINT)
1847 CASE_MATHFN (BUILT_IN_ROUND)
1848 CASE_MATHFN (BUILT_IN_SCALB)
1849 CASE_MATHFN (BUILT_IN_SCALBLN)
1850 CASE_MATHFN (BUILT_IN_SCALBN)
1851 CASE_MATHFN (BUILT_IN_SIGNBIT)
1852 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1853 CASE_MATHFN (BUILT_IN_SIN)
1854 CASE_MATHFN (BUILT_IN_SINCOS)
1855 CASE_MATHFN (BUILT_IN_SINH)
1856 CASE_MATHFN (BUILT_IN_SQRT)
1857 CASE_MATHFN (BUILT_IN_TAN)
1858 CASE_MATHFN (BUILT_IN_TANH)
1859 CASE_MATHFN (BUILT_IN_TGAMMA)
1860 CASE_MATHFN (BUILT_IN_TRUNC)
1861 CASE_MATHFN (BUILT_IN_Y0)
1862 CASE_MATHFN (BUILT_IN_Y1)
1863 CASE_MATHFN (BUILT_IN_YN)
1865 default:
1866 return END_BUILTINS;
1869 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1870 return fcode;
1871 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1872 return fcodef;
1873 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1874 return fcodel;
1875 else
1876 return END_BUILTINS;
1879 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1880 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1881 otherwise use the explicit declaration. If we can't do the conversion,
1882 return null. */
1884 static tree
1885 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1887 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1888 if (fcode2 == END_BUILTINS)
1889 return NULL_TREE;
1891 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1892 return NULL_TREE;
1894 return builtin_decl_explicit (fcode2);
1897 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1899 tree
1900 mathfn_built_in (tree type, enum built_in_function fn)
1902 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
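/* A minimal usage sketch (hypothetical caller, not taken from this file):

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   FN is the decl of the float variant sqrtf here, or NULL_TREE when no
   implicit declaration of it is available.  */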
1905 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1906 return its code, otherwise return IFN_LAST. Note that this function
1907    only tests whether the function is defined in internal-fn.def, not whether
1908 it is actually available on the target. */
1910 internal_fn
1911 associated_internal_fn (tree fndecl)
1913 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1914 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1915 switch (DECL_FUNCTION_CODE (fndecl))
1917 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1918 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1919 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1920 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1921 #include "internal-fn.def"
1923 CASE_FLT_FN (BUILT_IN_POW10):
1924 return IFN_EXP10;
1926 CASE_FLT_FN (BUILT_IN_DREM):
1927 return IFN_REMAINDER;
1929 CASE_FLT_FN (BUILT_IN_SCALBN):
1930 CASE_FLT_FN (BUILT_IN_SCALBLN):
1931 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1932 return IFN_LDEXP;
1933 return IFN_LAST;
1935 default:
1936 return IFN_LAST;
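/* For example, under the explicit cases above the decl for pow10f maps
   to IFN_EXP10 and the decl for dreml maps to IFN_REMAINDER, while
   scalbn maps to IFN_LDEXP only when the return type uses a base-2
   floating-point format; anything without a matching entry falls
   through to IFN_LAST.  */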
1940 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1941 on the current target by a call to an internal function, return the
1942 code of that internal function, otherwise return IFN_LAST. The caller
1943 is responsible for ensuring that any side-effects of the built-in
1944 call are dealt with correctly. E.g. if CALL sets errno, the caller
1945 must decide that the errno result isn't needed or make it available
1946 in some other way. */
1948 internal_fn
1949 replacement_internal_fn (gcall *call)
1951 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1953 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1954 if (ifn != IFN_LAST)
1956 tree_pair types = direct_internal_fn_types (ifn, call);
1957 if (direct_internal_fn_supported_p (ifn, types))
1958 return ifn;
1961 return IFN_LAST;
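/* A sketch of the intended use from a GIMPLE pass (hypothetical caller,
   assuming any errno side-effects of CALL have already been handled):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ... rebuild CALL as an internal-function call, e.g. via
           gimple_build_call_internal, and substitute it for the
           original statement ...
*/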
1964 /* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1968 static void
1969 expand_errno_check (tree exp, rtx target)
1971 rtx_code_label *lab = gen_label_rtx ();
1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1976 NULL_RTX, NULL, lab,
1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
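  /* Conceptually the emitted sequence is (illustrative only):

       if (result == result)
         goto lab;
       errno = EDOM;
     lab:;

     The self-comparison is true for every value except NaN, so errno is
     only written when the result is NaN.  */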
1980 #ifdef TARGET_EDOM
1981 /* If this built-in doesn't throw an exception, set errno directly. */
1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
1984 #ifdef GEN_ERRNO_RTX
1985 rtx errno_rtx = GEN_ERRNO_RTX;
1986 #else
1987 rtx errno_rtx
1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989 #endif
1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
1992 emit_label (lab);
1993 return;
1995 #endif
1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
2008 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
2014 static rtx
2015 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2017 optab builtin_optab;
2018 rtx op0;
2019 rtx_insn *insns;
2020 tree fndecl = get_callee_fndecl (exp);
2021 machine_mode mode;
2022 bool errno_set = false;
2023 bool try_widening = false;
2024 tree arg;
2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
2029 arg = CALL_EXPR_ARG (exp, 0);
2031 switch (DECL_FUNCTION_CODE (fndecl))
2033 CASE_FLT_FN (BUILT_IN_SQRT):
2034 errno_set = ! tree_expr_nonnegative_p (arg);
2035 try_widening = true;
2036 builtin_optab = sqrt_optab;
2037 break;
2038 CASE_FLT_FN (BUILT_IN_EXP):
2039 errno_set = true; builtin_optab = exp_optab; break;
2040 CASE_FLT_FN (BUILT_IN_EXP10):
2041 CASE_FLT_FN (BUILT_IN_POW10):
2042 errno_set = true; builtin_optab = exp10_optab; break;
2043 CASE_FLT_FN (BUILT_IN_EXP2):
2044 errno_set = true; builtin_optab = exp2_optab; break;
2045 CASE_FLT_FN (BUILT_IN_EXPM1):
2046 errno_set = true; builtin_optab = expm1_optab; break;
2047 CASE_FLT_FN (BUILT_IN_LOGB):
2048 errno_set = true; builtin_optab = logb_optab; break;
2049 CASE_FLT_FN (BUILT_IN_LOG):
2050 errno_set = true; builtin_optab = log_optab; break;
2051 CASE_FLT_FN (BUILT_IN_LOG10):
2052 errno_set = true; builtin_optab = log10_optab; break;
2053 CASE_FLT_FN (BUILT_IN_LOG2):
2054 errno_set = true; builtin_optab = log2_optab; break;
2055 CASE_FLT_FN (BUILT_IN_LOG1P):
2056 errno_set = true; builtin_optab = log1p_optab; break;
2057 CASE_FLT_FN (BUILT_IN_ASIN):
2058 builtin_optab = asin_optab; break;
2059 CASE_FLT_FN (BUILT_IN_ACOS):
2060 builtin_optab = acos_optab; break;
2061 CASE_FLT_FN (BUILT_IN_TAN):
2062 builtin_optab = tan_optab; break;
2063 CASE_FLT_FN (BUILT_IN_ATAN):
2064 builtin_optab = atan_optab; break;
2065 CASE_FLT_FN (BUILT_IN_FLOOR):
2066 builtin_optab = floor_optab; break;
2067 CASE_FLT_FN (BUILT_IN_CEIL):
2068 builtin_optab = ceil_optab; break;
2069 CASE_FLT_FN (BUILT_IN_TRUNC):
2070 builtin_optab = btrunc_optab; break;
2071 CASE_FLT_FN (BUILT_IN_ROUND):
2072 builtin_optab = round_optab; break;
2073 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2074 builtin_optab = nearbyint_optab;
2075 if (flag_trapping_math)
2076 break;
2077       /* Else fall through and expand as rint.  */
2078 CASE_FLT_FN (BUILT_IN_RINT):
2079 builtin_optab = rint_optab; break;
2080 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2081 builtin_optab = significand_optab; break;
2082 default:
2083 gcc_unreachable ();
2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
2089 if (! flag_errno_math || ! HONOR_NANS (mode))
2090 errno_set = false;
2092 /* Before working hard, check whether the instruction is available, but try
2093 to widen the mode for specific operations. */
2094 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2095 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2096 && (!errno_set || !optimize_insn_for_size_p ()))
2098 rtx result = gen_reg_rtx (mode);
2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
2102          side-effects more than once.  */
2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2107 start_sequence ();
2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 result = expand_unop (mode, builtin_optab, op0, result, 0);
2113 if (result != 0)
2115 if (errno_set)
2116 expand_errno_check (exp, result);
2118 /* Output the entire sequence. */
2119 insns = get_insns ();
2120 end_sequence ();
2121 emit_insn (insns);
2122 return result;
2125 /* If we were unable to expand via the builtin, stop the sequence
2126      (without outputting the insns) and call the library function
2127 with the stabilized argument list. */
2128 end_sequence ();
2131 return expand_call (exp, target, target == const0_rtx);
2134 /* Expand a call to the builtin binary math functions (pow and atan2).
2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2141 static rtx
2142 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2144 optab builtin_optab;
2145 rtx op0, op1, result;
2146 rtx_insn *insns;
2147 int op1_type = REAL_TYPE;
2148 tree fndecl = get_callee_fndecl (exp);
2149 tree arg0, arg1;
2150 machine_mode mode;
2151 bool errno_set = true;
2153 switch (DECL_FUNCTION_CODE (fndecl))
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 CASE_FLT_FN (BUILT_IN_LDEXP):
2158 op1_type = INTEGER_TYPE;
2159 default:
2160 break;
2163 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2164 return NULL_RTX;
2166 arg0 = CALL_EXPR_ARG (exp, 0);
2167 arg1 = CALL_EXPR_ARG (exp, 1);
2169 switch (DECL_FUNCTION_CODE (fndecl))
2171 CASE_FLT_FN (BUILT_IN_POW):
2172 builtin_optab = pow_optab; break;
2173 CASE_FLT_FN (BUILT_IN_ATAN2):
2174 builtin_optab = atan2_optab; break;
2175 CASE_FLT_FN (BUILT_IN_SCALB):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2177 return 0;
2178 builtin_optab = scalb_optab; break;
2179 CASE_FLT_FN (BUILT_IN_SCALBN):
2180 CASE_FLT_FN (BUILT_IN_SCALBLN):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 /* Fall through... */
2184 CASE_FLT_FN (BUILT_IN_LDEXP):
2185 builtin_optab = ldexp_optab; break;
2186 CASE_FLT_FN (BUILT_IN_FMOD):
2187 builtin_optab = fmod_optab; break;
2188 CASE_FLT_FN (BUILT_IN_REMAINDER):
2189 CASE_FLT_FN (BUILT_IN_DREM):
2190 builtin_optab = remainder_optab; break;
2191 default:
2192 gcc_unreachable ();
2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
2198 /* Before working hard, check whether the instruction is available. */
2199 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2200 return NULL_RTX;
2202 result = gen_reg_rtx (mode);
2204 if (! flag_errno_math || ! HONOR_NANS (mode))
2205 errno_set = false;
2207 if (errno_set && optimize_insn_for_size_p ())
2208 return 0;
2210 /* Always stabilize the argument list. */
2211 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2212 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2214 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2215 op1 = expand_normal (arg1);
2217 start_sequence ();
2219 /* Compute into RESULT.
2220 Set RESULT to wherever the result comes back. */
2221 result = expand_binop (mode, builtin_optab, op0, op1,
2222 result, 0, OPTAB_DIRECT);
2224 /* If we were unable to expand via the builtin, stop the sequence
2225      (without outputting the insns) and call the library function
2226 with the stabilized argument list. */
2227 if (result == 0)
2229 end_sequence ();
2230 return expand_call (exp, target, target == const0_rtx);
2233 if (errno_set)
2234 expand_errno_check (exp, result);
2236 /* Output the entire sequence. */
2237 insns = get_insns ();
2238 end_sequence ();
2239 emit_insn (insns);
2241 return result;
2244 /* Expand a call to the builtin ternary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2251 static rtx
2252 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254 optab builtin_optab;
2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
2259 machine_mode mode;
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2268 switch (DECL_FUNCTION_CODE (fndecl))
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 builtin_optab = fma_optab; break;
2272 default:
2273 gcc_unreachable ();
2276 /* Make a suitable register to place result in. */
2277 mode = TYPE_MODE (TREE_TYPE (exp));
2279 /* Before working hard, check whether the instruction is available. */
2280 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2281 return NULL_RTX;
2283 result = gen_reg_rtx (mode);
2285 /* Always stabilize the argument list. */
2286 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2287 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2288 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2290 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2291 op1 = expand_normal (arg1);
2292 op2 = expand_normal (arg2);
2294 start_sequence ();
2296 /* Compute into RESULT.
2297 Set RESULT to wherever the result comes back. */
2298 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2299 result, 0);
2301 /* If we were unable to expand via the builtin, stop the sequence
2302      (without outputting the insns) and call the library function
2303 with the stabilized argument list. */
2304 if (result == 0)
2306 end_sequence ();
2307 return expand_call (exp, target, target == const0_rtx);
2310 /* Output the entire sequence. */
2311 insns = get_insns ();
2312 end_sequence ();
2313 emit_insn (insns);
2315 return result;
2318 /* Expand a call to the builtin sin and cos math functions.
2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET.
2322 SUBTARGET may be used as the target for computing one of EXP's
2323 operands. */
2325 static rtx
2326 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2328 optab builtin_optab;
2329 rtx op0;
2330 rtx_insn *insns;
2331 tree fndecl = get_callee_fndecl (exp);
2332 machine_mode mode;
2333 tree arg;
2335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2336 return NULL_RTX;
2338 arg = CALL_EXPR_ARG (exp, 0);
2340 switch (DECL_FUNCTION_CODE (fndecl))
2342 CASE_FLT_FN (BUILT_IN_SIN):
2343 CASE_FLT_FN (BUILT_IN_COS):
2344 builtin_optab = sincos_optab; break;
2345 default:
2346 gcc_unreachable ();
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (exp));
2352   /* Check if the sincos insn is available; otherwise fall back
2353      to the sin or cos insn.  */
2354 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2355 switch (DECL_FUNCTION_CODE (fndecl))
2357 CASE_FLT_FN (BUILT_IN_SIN):
2358 builtin_optab = sin_optab; break;
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = cos_optab; break;
2361 default:
2362 gcc_unreachable ();
2365 /* Before working hard, check whether the instruction is available. */
2366 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2368 rtx result = gen_reg_rtx (mode);
2370 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2371 need to expand the argument again. This way, we will not perform
2372          side-effects more than once.  */
2373 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2375 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2377 start_sequence ();
2379 /* Compute into RESULT.
2380 Set RESULT to wherever the result comes back. */
2381 if (builtin_optab == sincos_optab)
2383 int ok;
2385 switch (DECL_FUNCTION_CODE (fndecl))
2387 CASE_FLT_FN (BUILT_IN_SIN):
2388 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2389 break;
2390 CASE_FLT_FN (BUILT_IN_COS):
2391 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2392 break;
2393 default:
2394 gcc_unreachable ();
2396 gcc_assert (ok);
2398 else
2399 result = expand_unop (mode, builtin_optab, op0, result, 0);
2401 if (result != 0)
2403 /* Output the entire sequence. */
2404 insns = get_insns ();
2405 end_sequence ();
2406 emit_insn (insns);
2407 return result;
2410 /* If we were unable to expand via the builtin, stop the sequence
2411      (without outputting the insns) and call the library function
2412 with the stabilized argument list. */
2413 end_sequence ();
2416 return expand_call (exp, target, target == const0_rtx);
2419 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2420    return an RTL instruction code that implements the functionality.
2421    If that isn't possible or available, return CODE_FOR_nothing.  */
2423 static enum insn_code
2424 interclass_mathfn_icode (tree arg, tree fndecl)
2426 bool errno_set = false;
2427 optab builtin_optab = unknown_optab;
2428 machine_mode mode;
2430 switch (DECL_FUNCTION_CODE (fndecl))
2432 CASE_FLT_FN (BUILT_IN_ILOGB):
2433 errno_set = true; builtin_optab = ilogb_optab; break;
2434 CASE_FLT_FN (BUILT_IN_ISINF):
2435 builtin_optab = isinf_optab; break;
2436 case BUILT_IN_ISNORMAL:
2437 case BUILT_IN_ISFINITE:
2438 CASE_FLT_FN (BUILT_IN_FINITE):
2439 case BUILT_IN_FINITED32:
2440 case BUILT_IN_FINITED64:
2441 case BUILT_IN_FINITED128:
2442 case BUILT_IN_ISINFD32:
2443 case BUILT_IN_ISINFD64:
2444 case BUILT_IN_ISINFD128:
2445 /* These builtins have no optabs (yet). */
2446 break;
2447 default:
2448 gcc_unreachable ();
2451 /* There's no easy way to detect the case we need to set EDOM. */
2452 if (flag_errno_math && errno_set)
2453 return CODE_FOR_nothing;
2455 /* Optab mode depends on the mode of the input argument. */
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2458 if (builtin_optab)
2459 return optab_handler (builtin_optab, mode);
2460 return CODE_FOR_nothing;
2463 /* Expand a call to one of the builtin math functions that operate on
2464    a floating-point argument and output an integer result (ilogb, isinf,
2465 isnan, etc).
2466 Return 0 if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
2468 function; if convenient, the result should be placed in TARGET. */
2470 static rtx
2471 expand_builtin_interclass_mathfn (tree exp, rtx target)
2473 enum insn_code icode = CODE_FOR_nothing;
2474 rtx op0;
2475 tree fndecl = get_callee_fndecl (exp);
2476 machine_mode mode;
2477 tree arg;
2479 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2480 return NULL_RTX;
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 icode = interclass_mathfn_icode (arg, fndecl);
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2486 if (icode != CODE_FOR_nothing)
2488 struct expand_operand ops[1];
2489 rtx_insn *last = get_last_insn ();
2490 tree orig_arg = arg;
2492 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2493 need to expand the argument again. This way, we will not perform
2494          side-effects more than once.  */
2495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2497 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2499 if (mode != GET_MODE (op0))
2500 op0 = convert_to_mode (mode, op0, 0);
2502 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2503 if (maybe_legitimize_operands (icode, 0, 1, ops)
2504 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2505 return ops[0].value;
2507 delete_insns_since (last);
2508 CALL_EXPR_ARG (exp, 0) = orig_arg;
2511 return NULL_RTX;
2514 /* Expand a call to the builtin sincos math function.
2515 Return NULL_RTX if a normal call should be emitted rather than expanding the
2516 function in-line. EXP is the expression that is a call to the builtin
2517 function. */
2519 static rtx
2520 expand_builtin_sincos (tree exp)
2522 rtx op0, op1, op2, target1, target2;
2523 machine_mode mode;
2524 tree arg, sinp, cosp;
2525 int result;
2526 location_t loc = EXPR_LOCATION (exp);
2527 tree alias_type, alias_off;
2529 if (!validate_arglist (exp, REAL_TYPE,
2530 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2531 return NULL_RTX;
2533 arg = CALL_EXPR_ARG (exp, 0);
2534 sinp = CALL_EXPR_ARG (exp, 1);
2535 cosp = CALL_EXPR_ARG (exp, 2);
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2540 /* Check if sincos insn is available, otherwise emit the call. */
2541 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2542 return NULL_RTX;
2544 target1 = gen_reg_rtx (mode);
2545 target2 = gen_reg_rtx (mode);
2547 op0 = expand_normal (arg);
2548 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2549 alias_off = build_int_cst (alias_type, 0);
2550 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 sinp, alias_off));
2552 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 cosp, alias_off));
2555 /* Compute into target1 and target2.
2556 Set TARGET to wherever the result comes back. */
2557 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2558 gcc_assert (result);
2560 /* Move target1 and target2 to the memory locations indicated
2561 by op1 and op2. */
2562 emit_move_insn (op1, target1);
2563 emit_move_insn (op2, target2);
2565 return const0_rtx;
2568 /* Expand a call to the internal cexpi builtin to the sincos math function.
2569 EXP is the expression that is a call to the builtin function; if convenient,
2570 the result should be placed in TARGET. */
2572 static rtx
2573 expand_builtin_cexpi (tree exp, rtx target)
2575 tree fndecl = get_callee_fndecl (exp);
2576 tree arg, type;
2577 machine_mode mode;
2578 rtx op0, op1, op2;
2579 location_t loc = EXPR_LOCATION (exp);
2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 return NULL_RTX;
2584 arg = CALL_EXPR_ARG (exp, 0);
2585 type = TREE_TYPE (arg);
2586 mode = TYPE_MODE (TREE_TYPE (arg));
2588 /* Try expanding via a sincos optab, fall back to emitting a libcall
2589      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2590      is only generated from sincos or cexp, or when either of them is available.  */
2591 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2593 op1 = gen_reg_rtx (mode);
2594 op2 = gen_reg_rtx (mode);
2596 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2598 /* Compute into op1 and op2. */
2599 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 else if (targetm.libc_has_function (function_sincos))
2603 tree call, fn = NULL_TREE;
2604 tree top1, top2;
2605 rtx op1a, op2a;
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2610 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2612 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2613 else
2614 gcc_unreachable ();
2616 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2617 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op1a = copy_addr_to_reg (XEXP (op1, 0));
2619 op2a = copy_addr_to_reg (XEXP (op2, 0));
2620 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2621 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623 /* Make sure not to fold the sincos call again. */
2624 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2625 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2626 call, 3, arg, top1, top2));
2628 else
2630 tree call, fn = NULL_TREE, narg;
2631 tree ctype = build_complex_type (type);
2633 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2634 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2636 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2638 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2639 else
2640 gcc_unreachable ();
2642       /* If we don't have a decl for cexp, create one.  This is the
2643          friendliest fallback if the user calls __builtin_cexpi
2644          on a target without full C99 function support.  */
2645 if (fn == NULL_TREE)
2647 tree fntype;
2648 const char *name = NULL;
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 name = "cexpf";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 name = "cexp";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 name = "cexpl";
2657 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2658 fn = build_fn_decl (name, fntype);
2661 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2662 build_real (type, dconst0), arg);
2664 /* Make sure not to fold the cexp call again. */
2665 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2666 return expand_expr (build_call_nary (ctype, call, 1, narg),
2667 target, VOIDmode, EXPAND_NORMAL);
2670 /* Now build the proper return type. */
2671 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2672 make_tree (TREE_TYPE (arg), op2),
2673 make_tree (TREE_TYPE (arg), op1)),
2674 target, VOIDmode, EXPAND_NORMAL);
2677 /* Conveniently construct a function call expression. FNDECL names the
2678 function to be called, N is the number of arguments, and the "..."
2679    parameters are the argument expressions.  Unlike build_call_expr,
2680    this doesn't fold the call, so it will always return a CALL_EXPR.  */
2682 static tree
2683 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685 va_list ap;
2686 tree fntype = TREE_TYPE (fndecl);
2687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689 va_start (ap, n);
2690 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2691 va_end (ap);
2692 SET_EXPR_LOCATION (fn, loc);
2693 return fn;
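/* A minimal usage sketch, mirroring the calls made later in this file
   (DEST, SRC and LEN are placeholder trees):

     tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
     tree call = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
                                        dest, src, len);

   The result is guaranteed to be a CALL_EXPR, which matters when the
   caller hands it straight to expand_expr.  */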
2696 /* Expand a call to one of the builtin rounding functions gcc defines
2697 as an extension (lfloor and lceil). As these are gcc extensions we
2698 do not need to worry about setting errno to EDOM.
2699    If expanding via the optab fails, lower the expression to (int)(floor(x)).
2700 EXP is the expression that is a call to the builtin function;
2701 if convenient, the result should be placed in TARGET. */
2703 static rtx
2704 expand_builtin_int_roundingfn (tree exp, rtx target)
2706 convert_optab builtin_optab;
2707 rtx op0, tmp;
2708 rtx_insn *insns;
2709 tree fndecl = get_callee_fndecl (exp);
2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
2712 machine_mode mode;
2713 tree arg;
2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2716 gcc_unreachable ();
2718 arg = CALL_EXPR_ARG (exp, 0);
2720 switch (DECL_FUNCTION_CODE (fndecl))
2722 CASE_FLT_FN (BUILT_IN_ICEIL):
2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2736 default:
2737 gcc_unreachable ();
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2743 target = gen_reg_rtx (mode);
2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
2747      side-effects more than once.  */
2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2752 start_sequence ();
2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
2759 end_sequence ();
2760 emit_insn (insns);
2761 return target;
2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2768 /* Fall back to floating point rounding optab. */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
2773      a call to the floor/ceil variants nevertheless.  This should give
2774      the best user experience on targets without full C99 support.  */
2775 if (fallback_fndecl == NULL_TREE)
2777 tree fntype;
2778 const char *name = NULL;
2780 switch (DECL_FUNCTION_CODE (fndecl))
2782 case BUILT_IN_ICEIL:
2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
2787 case BUILT_IN_ICEILF:
2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
2792 case BUILT_IN_ICEILL:
2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
2797 case BUILT_IN_IFLOOR:
2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
2802 case BUILT_IN_IFLOORF:
2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
2807 case BUILT_IN_IFLOORL:
2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2823 tmp = expand_normal (exp);
2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2826 /* Truncate the result of floating point optab to integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2831 return target;
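/* For example, on a target without an lceil pattern a call such as
   __builtin_lceil (x) is lowered by the fallback path above to roughly
   (long) ceil (x): a library call to ceil followed by expand_fix on
   the result.  */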
2834 /* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
2838 function; if convenient, the result should be placed in TARGET. */
2840 static rtx
2841 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2843 convert_optab builtin_optab;
2844 rtx op0;
2845 rtx_insn *insns;
2846 tree fndecl = get_callee_fndecl (exp);
2847 tree arg;
2848 machine_mode mode;
2849 enum built_in_function fallback_fn = BUILT_IN_NONE;
2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 gcc_unreachable ();
2854 arg = CALL_EXPR_ARG (exp, 0);
2856 switch (DECL_FUNCTION_CODE (fndecl))
2858 CASE_FLT_FN (BUILT_IN_IRINT):
2859 fallback_fn = BUILT_IN_LRINT;
2860 /* FALLTHRU */
2861 CASE_FLT_FN (BUILT_IN_LRINT):
2862 CASE_FLT_FN (BUILT_IN_LLRINT):
2863 builtin_optab = lrint_optab;
2864 break;
2866 CASE_FLT_FN (BUILT_IN_IROUND):
2867 fallback_fn = BUILT_IN_LROUND;
2868 /* FALLTHRU */
2869 CASE_FLT_FN (BUILT_IN_LROUND):
2870 CASE_FLT_FN (BUILT_IN_LLROUND):
2871 builtin_optab = lround_optab;
2872 break;
2874 default:
2875 gcc_unreachable ();
2878 /* There's no easy way to detect the case we need to set EDOM. */
2879 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2880 return NULL_RTX;
2882 /* Make a suitable register to place result in. */
2883 mode = TYPE_MODE (TREE_TYPE (exp));
2885   /* If errno doesn't have to be maintained, we can expand directly via the optab.  */
2886 if (!flag_errno_math)
2888 rtx result = gen_reg_rtx (mode);
2890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2891 need to expand the argument again. This way, we will not perform
2892          side-effects more than once.  */
2893 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2895 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2897 start_sequence ();
2899 if (expand_sfix_optab (result, op0, builtin_optab))
2901 /* Output the entire sequence. */
2902 insns = get_insns ();
2903 end_sequence ();
2904 emit_insn (insns);
2905 return result;
2908 /* If we were unable to expand via the builtin, stop the sequence
2909      (without outputting the insns) and call the library function
2910 with the stabilized argument list. */
2911 end_sequence ();
2914 if (fallback_fn != BUILT_IN_NONE)
2916 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2917 targets, (int) round (x) should never be transformed into
2918 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2919 a call to lround in the hope that the target provides at least some
2920          C99 functions.  This should give the best user experience on
2921          targets without full C99 support.  */
2922 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2923 fallback_fn, 0);
2925 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2926 fallback_fndecl, 1, arg);
2928 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2929 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2930 return convert_to_mode (mode, target, 0);
2933 return expand_call (exp, target, target == const0_rtx);
2936 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2937 a normal call should be emitted rather than expanding the function
2938 in-line. EXP is the expression that is a call to the builtin
2939 function; if convenient, the result should be placed in TARGET. */
2941 static rtx
2942 expand_builtin_powi (tree exp, rtx target)
2944 tree arg0, arg1;
2945 rtx op0, op1;
2946 machine_mode mode;
2947 machine_mode mode2;
2949 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
2952 arg0 = CALL_EXPR_ARG (exp, 0);
2953 arg1 = CALL_EXPR_ARG (exp, 1);
2954 mode = TYPE_MODE (TREE_TYPE (exp));
2956 /* Emit a libcall to libgcc. */
2958 /* Mode of the 2nd argument must match that of an int. */
2959 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961 if (target == NULL_RTX)
2962 target = gen_reg_rtx (mode);
2964 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2965 if (GET_MODE (op0) != mode)
2966 op0 = convert_to_mode (mode, op0, 0);
2967 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2968 if (GET_MODE (op1) != mode2)
2969 op1 = convert_to_mode (mode2, op1, 0);
2971 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2972 target, LCT_CONST, mode, 2,
2973 op0, mode, op1, mode2);
2975 return target;
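/* For instance, __builtin_powi (x, n) with a DFmode result becomes a
   call to the powi libfunc for that mode (presumably __powidf2 in
   libgcc), with the exponent first converted to the mode of int.  */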
2978 /* Expand expression EXP, which is a call to the strlen builtin.  Return
2979    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
2980 try to get the result in TARGET, if convenient. */
2982 static rtx
2983 expand_builtin_strlen (tree exp, rtx target,
2984 machine_mode target_mode)
2986 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
2988 else
2990 struct expand_operand ops[4];
2991 rtx pat;
2992 tree len;
2993 tree src = CALL_EXPR_ARG (exp, 0);
2994 rtx src_reg;
2995 rtx_insn *before_strlen;
2996 machine_mode insn_mode = target_mode;
2997 enum insn_code icode = CODE_FOR_nothing;
2998 unsigned int align;
3000 /* If the length can be computed at compile-time, return it. */
3001 len = c_strlen (src, 0);
3002 if (len)
3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
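    /* For example, for strlen ("hello") c_strlen returns the constant 5
       above, and the call expands to that constant directly; the strlen
       optab path below is only used when no constant length is known.  */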
3005     /* If the length can be computed at compile-time and is a constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
3021 return NULL_RTX;
3023 /* Bail out if we can't compute strlen in the right mode. */
3024 while (insn_mode != VOIDmode)
3026 icode = optab_handler (strlen_optab, insn_mode);
3027 if (icode != CODE_FOR_nothing)
3028 break;
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3032 if (insn_mode == VOIDmode)
3033 return NULL_RTX;
3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
3042 before_strlen = get_last_insn ();
3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
3049 return NULL_RTX;
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3054 if (pat != src_reg)
3056 #ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060 #endif
3061 emit_move_insn (src_reg, pat);
3063 pat = get_insns ();
3064 end_sequence ();
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
3071 /* Return the value in the proper mode for this function. */
3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
3074 else if (target != 0)
3075 convert_move (target, ops[0].value, 0);
3076 else
3077 target = convert_to_mode (target_mode, ops[0].value, 0);
3079 return target;
3083 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3084 bytes from constant string DATA + OFFSET and return it as target
3085 constant. */
3087 static rtx
3088 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3089 machine_mode mode)
3091 const char *str = (const char *) data;
3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
3097 return c_readstr (str + offset, mode);
3100 /* LEN specifies the length of the block for the memcpy/memset operation.
3101    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3102    In some cases we can make a very likely guess about the maximum size;
3103    when we can, we store that guess in PROBABLE_MAX_SIZE.  */
3105 static void
3106 determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
3111 if (CONST_INT_P (len_rtx))
3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3114 return;
3116 else
3118 wide_int min, max;
3119 enum value_range_type range_type = VR_UNDEFINED;
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3137 *min_size = min.to_uhwi ();
3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3139 *probable_max_size = *max_size = max.to_uhwi ();
3141 else if (range_type == VR_ANTI_RANGE)
3143          /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3144 if (min == 0)
3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
3149 /* Code like
3151 int n;
3152 if (n < 100)
3153 memcpy (a, b, n)
3155             produces an anti-range allowing negative values of N.  We can
3156             still use that information to guess that N is not negative.  */
3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
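/* A worked example with hypothetical values: if LEN is an SSA name
   with the value range [8, 64], the code above sets *MIN_SIZE to 8 and
   both *MAX_SIZE and *PROBABLE_MAX_SIZE to 64; with the anti-range
   ~[0, 7] it sets *MIN_SIZE to 8 while the maxima keep the bounds
   derived from LEN's type.  */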
3167 /* Helper function to do the actual work for expand_builtin_memcpy. */
3169 static rtx
3170 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3172 const char *src_str;
3173 unsigned int src_align = get_pointer_alignment (src);
3174 unsigned int dest_align = get_pointer_alignment (dest);
3175 rtx dest_mem, src_mem, dest_addr, len_rtx;
3176 HOST_WIDE_INT expected_size = -1;
3177 unsigned int expected_align = 0;
3178 unsigned HOST_WIDE_INT min_size;
3179 unsigned HOST_WIDE_INT max_size;
3180 unsigned HOST_WIDE_INT probable_max_size;
3182 /* If DEST is not a pointer type, call the normal function. */
3183 if (dest_align == 0)
3184 return NULL_RTX;
3186 /* If either SRC is not a pointer type, don't do this
3187 operation in-line. */
3188 if (src_align == 0)
3189 return NULL_RTX;
3191 if (currently_expanding_gimple_stmt)
3192 stringop_block_profile (currently_expanding_gimple_stmt,
3193 &expected_align, &expected_size);
3195 if (expected_align < dest_align)
3196 expected_align = dest_align;
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 len_rtx = expand_normal (len);
3200 determine_block_size (len, len_rtx, &min_size, &max_size,
3201 &probable_max_size);
3202 src_str = c_getstr (src);
3204 /* If SRC is a string constant and block move would be done
3205 by pieces, we can avoid loading the string from memory
3206      and only store the computed constants.  */
3207 if (src_str
3208 && CONST_INT_P (len_rtx)
3209 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3210 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3211 CONST_CAST (char *, src_str),
3212 dest_align, false))
3214 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3215 builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false, 0);
3218 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3219 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3220 return dest_mem;
3223 src_mem = get_memory_rtx (src, len);
3224 set_mem_align (src_mem, src_align);
3226   /* Copy the block in the most expedient way available.  */
3227 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3228 CALL_EXPR_TAILCALL (exp)
3229 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3230 expected_align, expected_size,
3231 min_size, max_size, probable_max_size);
3233 if (dest_addr == 0)
3235 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3236 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3239 return dest_addr;
3242 /* Expand a call EXP to the memcpy builtin.
3243    Return NULL_RTX if we failed; the caller should emit a normal call.
3244    Otherwise try to get the result in TARGET, if convenient (and in
3245 mode MODE if that's convenient). */
3247 static rtx
3248 expand_builtin_memcpy (tree exp, rtx target)
3250 if (!validate_arglist (exp,
3251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
3253 else
3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 tree len = CALL_EXPR_ARG (exp, 2);
3258 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3262 /* Expand an instrumented call EXP to the memcpy builtin.
3263    Return NULL_RTX if we failed; the caller should emit a normal call.
3264    Otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
3267 static rtx
3268 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 2);
3279 tree len = CALL_EXPR_ARG (exp, 4);
3280 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3282 /* Return src bounds with the result. */
3283 if (res)
3285 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3286 expand_normal (CALL_EXPR_ARG (exp, 1)));
3287 res = chkp_join_splitted_slot (res, bnd);
3289 return res;
3293 /* Expand a call EXP to the mempcpy builtin.
3294    Return NULL_RTX if we failed; the caller should emit a normal call.
3295    Otherwise try to get the result in TARGET, if convenient (and in
3296 mode MODE if that's convenient). If ENDP is 0 return the
3297 destination pointer, if ENDP is 1 return the end pointer ala
3298 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3299 stpcpy. */
3301 static rtx
3302 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3312 return expand_builtin_mempcpy_args (dest, src, len,
3313 target, mode, /*endp=*/ 1,
3314 exp);
3318 /* Expand an instrumented call EXP to the mempcpy builtin.
3319    Return NULL_RTX if we failed; the caller should emit a normal call.
3320    Otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). */
3323 static rtx
3324 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 2);
3335 tree len = CALL_EXPR_ARG (exp, 4);
3336 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3337 mode, 1, exp);
3339 /* Return src bounds with the result. */
3340 if (res)
3342 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3343 expand_normal (CALL_EXPR_ARG (exp, 1)));
3344 res = chkp_join_splitted_slot (res, bnd);
3346 return res;
3350 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
3353 The other arguments and return value are the same as for
3354 expand_builtin_mempcpy. */
3356 static rtx
3357 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3358 rtx target, machine_mode mode, int endp,
3359 tree orig_exp)
3361 tree fndecl = get_callee_fndecl (orig_exp);
3363 /* If return value is ignored, transform mempcpy into memcpy. */
3364 if (target == const0_rtx
3365 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3366 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3368 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3369 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3370 dest, src, len);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3373 else if (target == const0_rtx
3374 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3376 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3377 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3378 dest, src, len);
3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
3381 else
3383 const char *src_str;
3384 unsigned int src_align = get_pointer_alignment (src);
3385 unsigned int dest_align = get_pointer_alignment (dest);
3386 rtx dest_mem, src_mem, len_rtx;
3388 /* If either SRC or DEST is not a pointer type, don't do this
3389 operation in-line. */
3390 if (dest_align == 0 || src_align == 0)
3391 return NULL_RTX;
3393 /* If LEN is not constant, call the normal function. */
3394 if (! tree_fits_uhwi_p (len))
3395 return NULL_RTX;
3397 len_rtx = expand_normal (len);
3398 src_str = c_getstr (src);
3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
3402          and only store the computed constants.  */
3403 if (src_str
3404 && CONST_INT_P (len_rtx)
3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
3410 dest_mem = get_memory_rtx (dest, len);
3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3418 return dest_mem;
3421 if (CONST_INT_P (len_rtx)
3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3425 dest_mem = get_memory_rtx (dest, len);
3426 set_mem_align (dest_mem, dest_align);
3427 src_mem = get_memory_rtx (src, len);
3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3433 return dest_mem;
3436 return NULL_RTX;
3440 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3441    we failed; the caller should emit a normal call.  Otherwise try to
3442 get the result in TARGET, if convenient. If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3447 static rtx
3448 expand_movstr (tree dest, tree src, rtx target, int endp)
3450 struct expand_operand ops[3];
3451 rtx dest_mem;
3452 rtx src_mem;
3454 if (!targetm.have_movstr ())
3455 return NULL_RTX;
3457 dest_mem = get_memory_rtx (dest, NULL);
3458 src_mem = get_memory_rtx (src, NULL);
3459 if (!endp)
3461 target = force_reg (Pmode, XEXP (dest_mem, 0));
3462 dest_mem = replace_equiv_address (dest_mem, target);
3465 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3466 create_fixed_operand (&ops[1], dest_mem);
3467 create_fixed_operand (&ops[2], src_mem);
3468 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3469 return NULL_RTX;
3471 if (endp && target != const0_rtx)
3473 target = ops[0].value;
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3476 adjust it. */
3477 if (endp == 1)
3479 rtx tem = plus_constant (GET_MODE (target),
3480 gen_lowpart (GET_MODE (target), target), 1);
3481 emit_move_insn (target, force_operand (tem, NULL_RTX));
3484 return target;
3487 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3488 NULL_RTX if we failed and the caller should emit a normal call;
3489 otherwise try to get the result in TARGET, if convenient. */
3492 static rtx
3493 expand_builtin_strcpy (tree exp, rtx target)
3495 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 1);
3499 return expand_builtin_strcpy_args (dest, src, target);
3501 return NULL_RTX;
3504 /* Helper function to do the actual work for expand_builtin_strcpy. The
3505 arguments to the builtin_strcpy call DEST and SRC are broken out
3506 so that this can also be called without constructing an actual CALL_EXPR.
3507 The other arguments and return value are the same as for
3508 expand_builtin_strcpy. */
3510 static rtx
3511 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3513 return expand_movstr (dest, src, target, /*endp=*/0);
3516 /* Expand a call EXP to the stpcpy builtin.
3517 Return NULL_RTX if we failed and the caller should emit a normal call;
3518 otherwise try to get the result in TARGET, if convenient (and in
3519 mode MODE if that's convenient). */
3521 static rtx
3522 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3524 tree dst, src;
3525 location_t loc = EXPR_LOCATION (exp);
3527 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3530 dst = CALL_EXPR_ARG (exp, 0);
3531 src = CALL_EXPR_ARG (exp, 1);
3533 /* If return value is ignored, transform stpcpy into strcpy. */
3534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
3540 else
3542 tree len, lenp1;
3543 rtx ret;
3545 /* Ensure we get an actual string whose length can be evaluated at
3546 compile-time, not an expression containing a string. This is
3547 because the latter will potentially produce pessimized code
3548 when used to produce the return value. */
3549 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3550 return expand_movstr (dst, src, target, /*endp=*/2);
3552 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3553 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3554 target, mode, /*endp=*/2,
3555 exp);
3557 if (ret)
3558 return ret;
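/* Expanding as mempcpy failed; if the length is a known constant,
   expand as strcpy instead and rebuild the stpcpy result by hand
   as DST + LEN.  */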
3560 if (TREE_CODE (len) == INTEGER_CST)
3562 rtx len_rtx = expand_normal (len);
3564 if (CONST_INT_P (len_rtx))
3566 ret = expand_builtin_strcpy_args (dst, src, target);
3568 if (ret)
3570 if (! target)
3572 if (mode != VOIDmode)
3573 target = gen_reg_rtx (mode);
3574 else
3575 target = gen_reg_rtx (GET_MODE (ret));
3577 if (GET_MODE (target) != GET_MODE (ret))
3578 ret = gen_lowpart (GET_MODE (target), ret);
3580 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3581 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3582 gcc_assert (ret);
3584 return target;
3589 return expand_movstr (dst, src, target, /*endp=*/2);
3593 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3594 bytes from constant string DATA + OFFSET and return it as target
3595 constant. */
3597 static rtx
3598 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3599 machine_mode mode)
3601 const char *str = (const char *) data;
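/* Reads that start past the terminating NUL return zeros, supplying
   the trailing zero padding that strncpy requires.  */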
3603 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return const0_rtx;
3606 return c_readstr (str + offset, mode);
3609 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3610 NULL_RTX if we failed and the caller should emit a normal call. */
3612 static rtx
3613 expand_builtin_strncpy (tree exp, rtx target)
3615 location_t loc = EXPR_LOCATION (exp);
3617 if (validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3623 tree slen = c_strlen (src, 1);
3625 /* We must be passed constant LEN and SRC parameters. */
3626 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3627 return NULL_RTX;
3629 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3631 /* We're required to pad with trailing zeros if the requested
3632 len is greater than strlen(s2)+1. In that case try to
3633 use store_by_pieces; if it fails, punt. */
3634 if (tree_int_cst_lt (slen, len))
3636 unsigned int dest_align = get_pointer_alignment (dest);
3637 const char *p = c_getstr (src);
3638 rtx dest_mem;
3640 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3641 || !can_store_by_pieces (tree_to_uhwi (len),
3642 builtin_strncpy_read_str,
3643 CONST_CAST (char *, p),
3644 dest_align, false))
3645 return NULL_RTX;
3647 dest_mem = get_memory_rtx (dest, len);
3648 store_by_pieces (dest_mem, tree_to_uhwi (len),
3649 builtin_strncpy_read_str,
3650 CONST_CAST (char *, p), dest_align, false, 0);
3651 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3652 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3653 return dest_mem;
3656 return NULL_RTX;
3659 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3660 bytes from constant string DATA + OFFSET and return it as target
3661 constant. */
3663 static rtx
3664 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3665 machine_mode mode)
3667 const char *c = (const char *) data;
3668 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3670 memset (p, *c, GET_MODE_SIZE (mode));
3672 return c_readstr (p, mode);
3675 /* Callback routine for store_by_pieces. Return the RTL of a register
3676 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3677 char value given in the RTL register data. For example, if mode is
3678 4 bytes wide, return the RTL for 0x01010101*data. */
3680 static rtx
3681 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3682 machine_mode mode)
3684 rtx target, coeff;
3685 size_t size;
3686 char *p;
3688 size = GET_MODE_SIZE (mode);
3689 if (size == 1)
3690 return (rtx) data;
3692 p = XALLOCAVEC (char, size);
3693 memset (p, 1, size);
3694 coeff = c_readstr (p, mode);
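/* Multiplying the mode-wide constant 0x01...01 by the zero-extended
   byte value replicates that byte into every byte of the result.  */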
3696 target = convert_to_mode (mode, (rtx) data, 1);
3697 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3698 return force_reg (mode, target);
3701 /* Expand expression EXP, which is a call to the memset builtin. Return
3702 NULL_RTX if we failed and the caller should emit a normal call; otherwise
3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
3704 convenient). */
3706 static rtx
3707 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
3712 else
3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3721 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3722 Return NULL_RTX if we failed and the caller should emit a normal call; otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3726 static rtx
3727 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3731 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 2);
3737 tree len = CALL_EXPR_ARG (exp, 3);
3738 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3740 /* Return src bounds with the result. */
3741 if (res)
3743 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3744 expand_normal (CALL_EXPR_ARG (exp, 1)));
3745 res = chkp_join_splitted_slot (res, bnd);
3747 return res;
3751 /* Helper function to do the actual work for expand_builtin_memset. The
3752 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3753 so that this can also be called without constructing an actual CALL_EXPR.
3754 The other arguments and return value are the same as for
3755 expand_builtin_memset. */
3757 static rtx
3758 expand_builtin_memset_args (tree dest, tree val, tree len,
3759 rtx target, machine_mode mode, tree orig_exp)
3761 tree fndecl, fn;
3762 enum built_in_function fcode;
3763 machine_mode val_mode;
3764 char c;
3765 unsigned int dest_align;
3766 rtx dest_mem, dest_addr, len_rtx;
3767 HOST_WIDE_INT expected_size = -1;
3768 unsigned int expected_align = 0;
3769 unsigned HOST_WIDE_INT min_size;
3770 unsigned HOST_WIDE_INT max_size;
3771 unsigned HOST_WIDE_INT probable_max_size;
3773 dest_align = get_pointer_alignment (dest);
3775 /* If DEST is not a pointer type, don't do this operation in-line. */
3776 if (dest_align == 0)
3777 return NULL_RTX;
3779 if (currently_expanding_gimple_stmt)
3780 stringop_block_profile (currently_expanding_gimple_stmt,
3781 &expected_align, &expected_size);
3783 if (expected_align < dest_align)
3784 expected_align = dest_align;
3786 /* If the LEN parameter is zero, return DEST. */
3787 if (integer_zerop (len))
3789 /* Evaluate and ignore VAL in case it has side-effects. */
3790 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3791 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3794 /* Stabilize the arguments in case we fail. */
3795 dest = builtin_save_expr (dest);
3796 val = builtin_save_expr (val);
3797 len = builtin_save_expr (len);
3799 len_rtx = expand_normal (len);
3800 determine_block_size (len, len_rtx, &min_size, &max_size,
3801 &probable_max_size);
3802 dest_mem = get_memory_rtx (dest, len);
3803 val_mode = TYPE_MODE (unsigned_char_type_node);
3805 if (TREE_CODE (val) != INTEGER_CST)
3807 rtx val_rtx;
3809 val_rtx = expand_normal (val);
3810 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3812 /* Assume that we can memset by pieces if we can store
3813 the coefficients by pieces (in the required modes).
3814 We can't pass builtin_memset_gen_str as that emits RTL. */
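/* The probe value stored in C below is arbitrary; 1 merely stands in
   for the eventual byte while querying can_store_by_pieces.  */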
3815 c = 1;
3816 if (tree_fits_uhwi_p (len)
3817 && can_store_by_pieces (tree_to_uhwi (len),
3818 builtin_memset_read_str, &c, dest_align,
3819 true))
3821 val_rtx = force_reg (val_mode, val_rtx);
3822 store_by_pieces (dest_mem, tree_to_uhwi (len),
3823 builtin_memset_gen_str, val_rtx, dest_align,
3824 true, 0);
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3827 dest_align, expected_align,
3828 expected_size, min_size, max_size,
3829 probable_max_size))
3830 goto do_libcall;
3832 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3833 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3834 return dest_mem;
3837 if (target_char_cast (val, &c))
3838 goto do_libcall;
3840 if (c)
3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
3844 builtin_memset_read_str, &c, dest_align,
3845 true))
3846 store_by_pieces (dest_mem, tree_to_uhwi (len),
3847 builtin_memset_read_str, &c, dest_align, true, 0);
3848 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3849 gen_int_mode (c, val_mode),
3850 dest_align, expected_align,
3851 expected_size, min_size, max_size,
3852 probable_max_size))
3853 goto do_libcall;
3855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3857 return dest_mem;
3860 set_mem_align (dest_mem, dest_align);
3861 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3862 CALL_EXPR_TAILCALL (orig_exp)
3863 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3864 expected_align, expected_size,
3865 min_size, max_size,
3866 probable_max_size);
3868 if (dest_addr == 0)
3870 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3874 return dest_addr;
3876 do_libcall:
3877 fndecl = get_callee_fndecl (orig_exp);
3878 fcode = DECL_FUNCTION_CODE (fndecl);
3879 if (fcode == BUILT_IN_MEMSET
3880 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3881 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3882 dest, val, len);
3883 else if (fcode == BUILT_IN_BZERO)
3884 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3885 dest, len);
3886 else
3887 gcc_unreachable ();
3888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3890 return expand_call (fn, target, target == const0_rtx);
3893 /* Expand expression EXP, which is a call to the bzero builtin. Return
3894 NULL_RTX if we failed and the caller should emit a normal call. */
3896 static rtx
3897 expand_builtin_bzero (tree exp)
3899 tree dest, size;
3900 location_t loc = EXPR_LOCATION (exp);
3902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3903 return NULL_RTX;
3905 dest = CALL_EXPR_ARG (exp, 0);
3906 size = CALL_EXPR_ARG (exp, 1);
3908 /* New argument list transforming bzero(ptr x, int y) to
3909 memset(ptr x, int 0, size_t y). This is done this way
3910 so that if it isn't expanded inline, we fall back to
3911 calling bzero instead of memset. */
3913 return expand_builtin_memset_args (dest, integer_zero_node,
3914 fold_convert_loc (loc,
3915 size_type_node, size),
3916 const0_rtx, VOIDmode, exp);
3919 /* Try to expand cmpstr operation ICODE with the given operands.
3920 Return the result rtx on success, otherwise return null. */
3922 static rtx
3923 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3924 HOST_WIDE_INT align)
3926 machine_mode insn_mode = insn_data[icode].operand[0].mode;
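/* Only pass TARGET through when it is a pseudo register; a hard
   register or memory target could over-constrain the pattern, so let
   the expander choose otherwise.  */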
3928 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3929 target = NULL_RTX;
3931 struct expand_operand ops[4];
3932 create_output_operand (&ops[0], target, insn_mode);
3933 create_fixed_operand (&ops[1], arg1_rtx);
3934 create_fixed_operand (&ops[2], arg2_rtx);
3935 create_integer_operand (&ops[3], align);
3936 if (maybe_expand_insn (icode, 4, ops))
3937 return ops[0].value;
3938 return NULL_RTX;
3941 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3942 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3943 otherwise return null. */
3945 static rtx
3946 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3947 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3948 HOST_WIDE_INT align)
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3967 /* Expand expression EXP, which is a call to the memcmp built-in function.
3968 Return NULL_RTX if we failed and the caller should emit a normal call,
3969 otherwise try to get the result in TARGET, if convenient. */
3971 static rtx
3972 expand_builtin_memcmp (tree exp, rtx target)
3974 if (!validate_arglist (exp,
3975 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3976 return NULL_RTX;
3978 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3979 implementing memcmp because it will stop if it encounters two
3980 zero bytes. */
3981 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3982 if (icode == CODE_FOR_nothing)
3983 return NULL_RTX;
3985 tree arg1 = CALL_EXPR_ARG (exp, 0);
3986 tree arg2 = CALL_EXPR_ARG (exp, 1);
3987 tree len = CALL_EXPR_ARG (exp, 2);
3989 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3990 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3992 /* If we don't have POINTER_TYPE, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
3996 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3997 location_t loc = EXPR_LOCATION (exp);
3998 rtx arg1_rtx = get_memory_rtx (arg1, len);
3999 rtx arg2_rtx = get_memory_rtx (arg2, len);
4000 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4009 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4010 TREE_TYPE (len), arg3_rtx,
4011 MIN (arg1_align, arg2_align));
4012 if (result)
4014 /* Return the value in the proper mode for this function. */
4015 if (GET_MODE (result) == mode)
4016 return result;
4018 if (target != 0)
4020 convert_move (target, result, 0);
4021 return target;
4024 return convert_to_mode (mode, result, 0);
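/* The cmpmem pattern failed to expand; fall back to an explicit
   memcmp libcall, reusing the argument RTXen computed above.  */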
4027 result = target;
4028 if (! (result != 0
4029 && REG_P (result) && GET_MODE (result) == mode
4030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4031 result = gen_reg_rtx (mode);
4033 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4034 TYPE_MODE (integer_type_node), 3,
4035 XEXP (arg1_rtx, 0), Pmode,
4036 XEXP (arg2_rtx, 0), Pmode,
4037 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4038 TYPE_UNSIGNED (sizetype)),
4039 TYPE_MODE (sizetype));
4040 return result;
4043 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4044 if we failed and the caller should emit a normal call; otherwise try to get
4045 the result in TARGET, if convenient. */
4047 static rtx
4048 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
4053 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4054 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4055 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4057 rtx arg1_rtx, arg2_rtx;
4058 tree fndecl, fn;
4059 tree arg1 = CALL_EXPR_ARG (exp, 0);
4060 tree arg2 = CALL_EXPR_ARG (exp, 1);
4061 rtx result = NULL_RTX;
4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4066 /* If we don't have POINTER_TYPE, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
4068 return NULL_RTX;
4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
4077 /* Try to call cmpstrsi. */
4078 if (cmpstr_icode != CODE_FOR_nothing)
4079 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4080 MIN (arg1_align, arg2_align));
4082 /* Try to determine at least one length and call cmpstrnsi. */
4083 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4085 tree len;
4086 rtx arg3_rtx;
4088 tree len1 = c_strlen (arg1, 1);
4089 tree len2 = c_strlen (arg2, 1);
4091 if (len1)
4092 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4093 if (len2)
4094 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4096 /* If we don't have a constant length for the first, use the length
4097 of the second, if we know it. We don't require a constant for
4098 this case; some cost analysis could be done if both are available
4099 but neither is constant. For now, assume they're equally cheap,
4100 unless one has side effects. If both strings have constant lengths,
4101 use the smaller. */
4103 if (!len1)
4104 len = len2;
4105 else if (!len2)
4106 len = len1;
4107 else if (TREE_SIDE_EFFECTS (len1))
4108 len = len2;
4109 else if (TREE_SIDE_EFFECTS (len2))
4110 len = len1;
4111 else if (TREE_CODE (len1) != INTEGER_CST)
4112 len = len2;
4113 else if (TREE_CODE (len2) != INTEGER_CST)
4114 len = len1;
4115 else if (tree_int_cst_lt (len1, len2))
4116 len = len1;
4117 else
4118 len = len2;
4120 /* If both arguments have side effects, we cannot optimize. */
4121 if (len && !TREE_SIDE_EFFECTS (len))
4123 arg3_rtx = expand_normal (len);
4124 result = expand_cmpstrn_or_cmpmem
4125 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4126 arg3_rtx, MIN (arg1_align, arg2_align));
4130 if (result)
4132 /* Return the value in the proper mode for this function. */
4133 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4134 if (GET_MODE (result) == mode)
4135 return result;
4136 if (target == 0)
4137 return convert_to_mode (mode, result, 0);
4138 convert_move (target, result, 0);
4139 return target;
4142 /* Expand the library call ourselves using a stabilized argument
4143 list to avoid evaluating the function's arguments twice. */
4144 fndecl = get_callee_fndecl (exp);
4145 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4148 return expand_call (fn, target, target == const0_rtx);
4150 return NULL_RTX;
4153 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4154 NULL_RTX if we failed and the caller should emit a normal call; otherwise try to get
4155 the result in TARGET, if convenient. */
4157 static rtx
4158 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4159 ATTRIBUTE_UNUSED machine_mode mode)
4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
4167 /* If c_strlen can determine an expression for one of the string
4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
4169 using length MIN(strlen(string)+1, arg3). */
4170 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4171 if (cmpstrn_icode != CODE_FOR_nothing)
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4175 rtx result;
4176 tree fndecl, fn;
4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4184 len1 = c_strlen (arg1, 1);
4185 len2 = c_strlen (arg2, 1);
4187 if (len1)
4188 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4189 if (len2)
4190 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4197 use the smaller. */
4199 if (!len1)
4200 len = len2;
4201 else if (!len2)
4202 len = len1;
4203 else if (TREE_SIDE_EFFECTS (len1))
4204 len = len2;
4205 else if (TREE_SIDE_EFFECTS (len2))
4206 len = len1;
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4208 len = len2;
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4210 len = len1;
4211 else if (tree_int_cst_lt (len1, len2))
4212 len = len1;
4213 else
4214 len = len2;
4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
4218 return NULL_RTX;
4220 /* The actual new length parameter is MIN(len,arg3). */
4221 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4222 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4224 /* If we don't have POINTER_TYPE, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
4226 return NULL_RTX;
4228 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4231 len = builtin_save_expr (len);
4233 arg1_rtx = get_memory_rtx (arg1, len);
4234 arg2_rtx = get_memory_rtx (arg2, len);
4235 arg3_rtx = expand_normal (len);
4236 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4237 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4238 MIN (arg1_align, arg2_align));
4239 if (result)
4241 /* Return the value in the proper mode for this function. */
4242 mode = TYPE_MODE (TREE_TYPE (exp));
4243 if (GET_MODE (result) == mode)
4244 return result;
4245 if (target == 0)
4246 return convert_to_mode (mode, result, 0);
4247 convert_move (target, result, 0);
4248 return target;
4251 /* Expand the library call ourselves using a stabilized argument
4252 list to avoid evaluating the function's arguments twice. */
4253 fndecl = get_callee_fndecl (exp);
4254 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4255 arg1, arg2, len);
4256 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4257 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4258 return expand_call (fn, target, target == const0_rtx);
4260 return NULL_RTX;
4263 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4264 if that's convenient. */
4266 rtx
4267 expand_builtin_saveregs (void)
4269 rtx val;
4270 rtx_insn *seq;
4272 /* Don't do __builtin_saveregs more than once in a function.
4273 Save the result of the first call and reuse it. */
4274 if (saveregs_value != 0)
4275 return saveregs_value;
4277 /* When this function is called, it means that registers must be
4278 saved on entry to this function. So we migrate the call to the
4279 first insn of this function. */
4281 start_sequence ();
4283 /* Do whatever the machine needs done in this case. */
4284 val = targetm.calls.expand_builtin_saveregs ();
4286 seq = get_insns ();
4287 end_sequence ();
4289 saveregs_value = val;
4291 /* Put the insns after the NOTE that starts the function. If this
4292 is inside a start_sequence, make the outer-level insn chain current, so
4293 the code is placed at the start of the function. */
4294 push_topmost_sequence ();
4295 emit_insn_after (seq, entry_of_function ());
4296 pop_topmost_sequence ();
4298 return val;
4301 /* Expand a call to __builtin_next_arg. */
4303 static rtx
4304 expand_builtin_next_arg (void)
4306 /* Checking arguments is already done in fold_builtin_next_arg
4307 that must be called before this function. */
4308 return expand_binop (ptr_mode, add_optab,
4309 crtl->args.internal_arg_pointer,
4310 crtl->args.arg_offset_rtx,
4311 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4314 /* Make it easier for the backends by protecting the valist argument
4315 from multiple evaluations. */
4317 static tree
4318 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4320 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4322 /* The current way of determining the type of valist is completely
4323 bogus. We should have the information on the va builtin instead. */
4324 if (!vatype)
4325 vatype = targetm.fn_abi_va_list (cfun->decl);
4327 if (TREE_CODE (vatype) == ARRAY_TYPE)
4329 if (TREE_SIDE_EFFECTS (valist))
4330 valist = save_expr (valist);
4332 /* For this case, the backends will be expecting a pointer to
4333 vatype, but it's possible we've actually been given an array
4334 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4335 So fix it. */
4336 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4338 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4339 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4342 else
4344 tree pt = build_pointer_type (vatype);
4346 if (! needs_lvalue)
4348 if (! TREE_SIDE_EFFECTS (valist))
4349 return valist;
4351 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4352 TREE_SIDE_EFFECTS (valist) = 1;
4355 if (TREE_SIDE_EFFECTS (valist))
4356 valist = save_expr (valist);
4357 valist = fold_build2_loc (loc, MEM_REF,
4358 vatype, valist, build_int_cst (pt, 0));
4361 return valist;
4364 /* The "standard" definition of va_list is void*. */
4366 tree
4367 std_build_builtin_va_list (void)
4369 return ptr_type_node;
4372 /* The "standard" abi va_list is va_list_type_node. */
4374 tree
4375 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4377 return va_list_type_node;
4380 /* The "standard" type of va_list is va_list_type_node. */
4382 tree
4383 std_canonical_va_list_type (tree type)
4385 tree wtype, htype;
4387 if (INDIRECT_REF_P (type))
4388 type = TREE_TYPE (type);
4389 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4390 type = TREE_TYPE (type);
4391 wtype = va_list_type_node;
4392 htype = type;
4393 /* Treat structure va_list types. */
4394 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4395 htype = TREE_TYPE (htype);
4396 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4398 /* If va_list is an array type, the argument may have decayed
4399 to a pointer type, e.g. by being passed to another function.
4400 In that case, unwrap both types so that we can compare the
4401 underlying records. */
4402 if (TREE_CODE (htype) == ARRAY_TYPE
4403 || POINTER_TYPE_P (htype))
4405 wtype = TREE_TYPE (wtype);
4406 htype = TREE_TYPE (htype);
4409 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4410 return va_list_type_node;
4412 return NULL_TREE;
4415 /* The "standard" implementation of va_start: just assign `nextarg' to
4416 the variable. */
4418 void
4419 std_expand_builtin_va_start (tree valist, rtx nextarg)
4421 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 convert_move (va_r, nextarg, 0);
4424 /* We do not have any valid bounds for the pointer, so
4425 just store zero bounds for it. */
4426 if (chkp_function_instrumented_p (current_function_decl))
4427 chkp_expand_bounds_reset_for_mem (valist,
4428 make_tree (TREE_TYPE (valist),
4429 nextarg));
4432 /* Expand EXP, a call to __builtin_va_start. */
4434 static rtx
4435 expand_builtin_va_start (tree exp)
4437 rtx nextarg;
4438 tree valist;
4439 location_t loc = EXPR_LOCATION (exp);
4441 if (call_expr_nargs (exp) < 2)
4443 error_at (loc, "too few arguments to function %<va_start%>");
4444 return const0_rtx;
4447 if (fold_builtin_next_arg (exp, true))
4448 return const0_rtx;
4450 nextarg = expand_builtin_next_arg ();
4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4455 else
4456 std_expand_builtin_va_start (valist, nextarg);
4458 return const0_rtx;
4461 /* Expand EXP, a call to __builtin_va_end. */
4463 static rtx
4464 expand_builtin_va_end (tree exp)
4466 tree valist = CALL_EXPR_ARG (exp, 0);
4468 /* Evaluate for side effects, if needed. I hate macros that don't
4469 do that. */
4470 if (TREE_SIDE_EFFECTS (valist))
4471 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4473 return const0_rtx;
4476 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4477 builtin rather than just as an assignment in stdarg.h because of the
4478 nastiness of array-type va_list types. */
4480 static rtx
4481 expand_builtin_va_copy (tree exp)
4483 tree dst, src, t;
4484 location_t loc = EXPR_LOCATION (exp);
4486 dst = CALL_EXPR_ARG (exp, 0);
4487 src = CALL_EXPR_ARG (exp, 1);
4489 dst = stabilize_va_list_loc (loc, dst, 1);
4490 src = stabilize_va_list_loc (loc, src, 0);
4492 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4494 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4496 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4497 TREE_SIDE_EFFECTS (t) = 1;
4498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4500 else
4502 rtx dstb, srcb, size;
4504 /* Evaluate to pointers. */
4505 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4506 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4507 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4508 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4510 dstb = convert_memory_address (Pmode, dstb);
4511 srcb = convert_memory_address (Pmode, srcb);
4513 /* "Dereference" to BLKmode memories. */
4514 dstb = gen_rtx_MEM (BLKmode, dstb);
4515 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4516 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4517 srcb = gen_rtx_MEM (BLKmode, srcb);
4518 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4519 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4521 /* Copy. */
4522 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4525 return const0_rtx;
4528 /* Expand a call to one of the builtin functions __builtin_frame_address or
4529 __builtin_return_address. */
4531 static rtx
4532 expand_builtin_frame_address (tree fndecl, tree exp)
4534 /* The argument must be a nonnegative integer constant.
4535 It counts the number of frames to scan up the stack.
4536 The value is either the frame pointer value or the return
4537 address saved in that frame. */
4538 if (call_expr_nargs (exp) == 0)
4539 /* Warning about missing arg was already issued. */
4540 return const0_rtx;
4541 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4543 error ("invalid argument to %qD", fndecl);
4544 return const0_rtx;
4546 else
4548 /* Number of frames to scan up the stack. */
4549 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4551 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4553 /* Some ports cannot access arbitrary stack frames. */
4554 if (tem == NULL)
4556 warning (0, "unsupported argument to %qD", fndecl);
4557 return const0_rtx;
4560 if (count)
4562 /* Warn since no effort is made to ensure that any frame
4563 beyond the current one exists or can be safely reached. */
4564 warning (OPT_Wframe_address, "calling %qD with "
4565 "a nonzero argument is unsafe", fndecl);
4568 /* For __builtin_frame_address, return what we've got. */
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 return tem;
4572 if (!REG_P (tem)
4573 && ! CONSTANT_P (tem))
4574 tem = copy_addr_to_reg (tem);
4575 return tem;
4579 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4580 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4581 is the same as for allocate_dynamic_stack_space. */
4583 static rtx
4584 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4586 rtx op0;
4587 rtx result;
4588 bool valid_arglist;
4589 unsigned int align;
4590 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4591 == BUILT_IN_ALLOCA_WITH_ALIGN);
4593 valid_arglist
4594 = (alloca_with_align
4595 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4596 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4598 if (!valid_arglist)
4599 return NULL_RTX;
4601 /* Compute the argument. */
4602 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4604 /* Compute the alignment. */
4605 align = (alloca_with_align
4606 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4607 : BIGGEST_ALIGNMENT);
4609 /* Allocate the desired space. */
4610 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4611 result = convert_memory_address (ptr_mode, result);
4613 return result;
4616 /* Expand a call to bswap builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
4621 static rtx
4622 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4623 rtx subtarget)
4625 tree arg;
4626 rtx op0;
4628 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4631 arg = CALL_EXPR_ARG (exp, 0);
4632 op0 = expand_expr (arg,
4633 subtarget && GET_MODE (subtarget) == target_mode
4634 ? subtarget : NULL_RTX,
4635 target_mode, EXPAND_NORMAL);
4636 if (GET_MODE (op0) != target_mode)
4637 op0 = convert_to_mode (target_mode, op0, 1);
4639 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4641 gcc_assert (target);
4643 return convert_to_mode (target_mode, target, 1);
4646 /* Expand a call to a unary builtin in EXP.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding the
4648 function in-line. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing one of EXP's operands. */
4651 static rtx
4652 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4653 rtx subtarget, optab op_optab)
4655 rtx op0;
4657 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4660 /* Compute the argument. */
4661 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4662 (subtarget
4663 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4664 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4665 VOIDmode, EXPAND_NORMAL);
4666 /* Compute op, into TARGET if possible.
4667 Set TARGET to wherever the result comes back. */
4668 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4669 op_optab, op0, target, op_optab != clrsb_optab);
4670 gcc_assert (target);
4672 return convert_to_mode (target_mode, target, 0);
4675 /* Expand a call to __builtin_expect. We just return our argument
4676 as the builtin_expect semantics should already have been handled
4677 by the tree branch prediction pass. */
4679 static rtx
4680 expand_builtin_expect (tree exp, rtx target)
4682 tree arg;
4684 if (call_expr_nargs (exp) < 2)
4685 return const0_rtx;
4686 arg = CALL_EXPR_ARG (exp, 0);
4688 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4689 /* When guessing was done, the hints should already have been stripped away. */
4690 gcc_assert (!flag_guess_branch_prob
4691 || optimize == 0 || seen_error ());
4692 return target;
4695 /* Expand a call to __builtin_assume_aligned. We just return our first
4696 argument, as the builtin_assume_aligned semantics should already
4697 have been handled by CCP. */
4699 static rtx
4700 expand_builtin_assume_aligned (tree exp, rtx target)
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4705 EXPAND_NORMAL);
4706 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4707 && (call_expr_nargs (exp) < 3
4708 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4709 return target;
4712 void
4713 expand_builtin_trap (void)
4715 if (targetm.have_trap ())
4717 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4718 /* For trap insns, when not accumulating outgoing args, force a
4719 REG_ARGS_SIZE note to prevent crossjumping of calls with
4720 different arg sizes. */
4721 if (!ACCUMULATE_OUTGOING_ARGS)
4722 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4724 else
4725 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4726 emit_barrier ();
4729 /* Expand a call to __builtin_unreachable. We do nothing except emit
4730 a barrier saying that control flow will not pass here.
4732 It is the responsibility of the program being compiled to ensure
4733 that control flow never reaches __builtin_unreachable. */
4734 static void
4735 expand_builtin_unreachable (void)
4737 emit_barrier ();
4740 /* Expand EXP, a call to fabs, fabsf or fabsl.
4741 Return NULL_RTX if a normal call should be emitted rather than expanding
4742 the function inline. If convenient, the result should be placed
4743 in TARGET. SUBTARGET may be used as the target for computing
4744 the operand. */
4746 static rtx
4747 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4749 machine_mode mode;
4750 tree arg;
4751 rtx op0;
4753 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
4756 arg = CALL_EXPR_ARG (exp, 0);
4757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4758 mode = TYPE_MODE (TREE_TYPE (arg));
4759 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4760 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4763 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4764 Return NULL if a normal call should be emitted rather than expanding the
4765 function inline. If convenient, the result should be placed in TARGET.
4766 SUBTARGET may be used as the target for computing the operand. */
4768 static rtx
4769 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4771 rtx op0, op1;
4772 tree arg;
4774 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
4777 arg = CALL_EXPR_ARG (exp, 0);
4778 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4780 arg = CALL_EXPR_ARG (exp, 1);
4781 op1 = expand_normal (arg);
4783 return expand_copysign (op0, op1, target);
4786 /* Expand a call to __builtin___clear_cache. */
4788 static rtx
4789 expand_builtin___clear_cache (tree exp)
4791 if (!targetm.code_for_clear_cache)
4793 #ifdef CLEAR_INSN_CACHE
4794 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4795 does something. Just do the default expansion to a call to
4796 __clear_cache(). */
4797 return NULL_RTX;
4798 #else
4799 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4800 does nothing. There is no need to call it. Do nothing. */
4801 return const0_rtx;
4802 #endif /* CLEAR_INSN_CACHE */
4805 /* We have a "clear_cache" insn, and it will handle everything. */
4806 tree begin, end;
4807 rtx begin_rtx, end_rtx;
4809 /* We must not expand to a library call. If we did, any
4810 fallback library function in libgcc that might contain a call to
4811 __builtin___clear_cache() would recurse infinitely. */
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4814 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4815 return const0_rtx;
4818 if (targetm.have_clear_cache ())
4820 struct expand_operand ops[2];
4822 begin = CALL_EXPR_ARG (exp, 0);
4823 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4825 end = CALL_EXPR_ARG (exp, 1);
4826 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4828 create_address_operand (&ops[0], begin_rtx);
4829 create_address_operand (&ops[1], end_rtx);
4830 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4831 return const0_rtx;
4833 return const0_rtx;
4836 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4838 static rtx
4839 round_trampoline_addr (rtx tramp)
4841 rtx temp, addend, mask;
4843 /* If we don't need too much alignment, we'll have been guaranteed
4844 proper alignment by get_trampoline_type. */
4845 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4846 return tramp;
4848 /* Round address up to desired boundary. */
4849 temp = gen_reg_rtx (Pmode);
4850 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4851 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
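/* Compute (TRAMP + ALIGN_BYTES - 1) & -ALIGN_BYTES, rounding the
   address up to the next multiple of the alignment in bytes.  */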
4853 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4856 temp, 0, OPTAB_LIB_WIDEN);
4858 return tramp;
4861 static rtx
4862 expand_builtin_init_trampoline (tree exp, bool onstack)
4864 tree t_tramp, t_func, t_chain;
4865 rtx m_tramp, r_tramp, r_chain, tmp;
4867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4868 POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4871 t_tramp = CALL_EXPR_ARG (exp, 0);
4872 t_func = CALL_EXPR_ARG (exp, 1);
4873 t_chain = CALL_EXPR_ARG (exp, 2);
4875 r_tramp = expand_normal (t_tramp);
4876 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4877 MEM_NOTRAP_P (m_tramp) = 1;
4879 /* If ONSTACK, the TRAMP argument should be the address of a field
4880 within the local function's FRAME decl. Either way, let's see if
4881 we can fill in the MEM_ATTRs for this memory. */
4882 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4883 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4885 /* The creator of a heap trampoline is responsible for making sure the
4886 address is aligned to at least STACK_BOUNDARY. Normally malloc
4887 will ensure this anyhow. */
4888 tmp = round_trampoline_addr (r_tramp);
4889 if (tmp != r_tramp)
4891 m_tramp = change_address (m_tramp, BLKmode, tmp);
4892 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4893 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4896 /* The FUNC argument should be the address of the nested function.
4897 Extract the actual function decl to pass to the hook. */
4898 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4899 t_func = TREE_OPERAND (t_func, 0);
4900 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4902 r_chain = expand_normal (t_chain);
4904 /* Generate insns to initialize the trampoline. */
4905 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4907 if (onstack)
4909 trampolines_created = 1;
4911 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4912 "trampoline generated for nested function %qD", t_func);
4915 return const0_rtx;
4918 static rtx
4919 expand_builtin_adjust_trampoline (tree exp)
4921 rtx tramp;
4923 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4924 return NULL_RTX;
4926 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4927 tramp = round_trampoline_addr (tramp);
4928 if (targetm.calls.trampoline_adjust_address)
4929 tramp = targetm.calls.trampoline_adjust_address (tramp);
4931 return tramp;
4934 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4935 function. The function first checks whether the back end provides
4936 an insn to implement signbit for the respective mode. If not, it
4937 checks whether the floating point format of the value is such that
4938 the sign bit can be extracted. If that is not the case, error out.
4939 EXP is the expression that is a call to the builtin function; if
4940 convenient, the result should be placed in TARGET. */
4941 static rtx
4942 expand_builtin_signbit (tree exp, rtx target)
4944 const struct real_format *fmt;
4945 machine_mode fmode, imode, rmode;
4946 tree arg;
4947 int word, bitpos;
4948 enum insn_code icode;
4949 rtx temp;
4950 location_t loc = EXPR_LOCATION (exp);
4952 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4953 return NULL_RTX;
4955 arg = CALL_EXPR_ARG (exp, 0);
4956 fmode = TYPE_MODE (TREE_TYPE (arg));
4957 rmode = TYPE_MODE (TREE_TYPE (exp));
4958 fmt = REAL_MODE_FORMAT (fmode);
4960 arg = builtin_save_expr (arg);
4962 /* Expand the argument yielding a RTX expression. */
4963 temp = expand_normal (arg);
4965 /* Check if the back end provides an insn that handles signbit for the
4966 argument's mode. */
4967 icode = optab_handler (signbit_optab, fmode);
4968 if (icode != CODE_FOR_nothing)
4970 rtx_insn *last = get_last_insn ();
4971 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4972 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4973 return target;
4974 delete_insns_since (last);
4977 /* For floating point formats without a sign bit, implement signbit
4978 as "ARG < 0.0". */
4979 bitpos = fmt->signbit_ro;
4980 if (bitpos < 0)
4982 /* But we can't do this if the format supports signed zero. */
4983 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4985 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4986 build_real (TREE_TYPE (arg), dconst0));
4987 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
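/* The format does have a sign bit, so extract it with integer
   operations: view the value's bits in an integer mode, picking out
   the word that holds the sign bit when the float is wider than a
   word.  */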
4990 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4992 imode = int_mode_for_mode (fmode);
4993 gcc_assert (imode != BLKmode);
4994 temp = gen_lowpart (imode, temp);
4996 else
4998 imode = word_mode;
4999 /* Handle targets with different FP word orders. */
5000 if (FLOAT_WORDS_BIG_ENDIAN)
5001 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5002 else
5003 word = bitpos / BITS_PER_WORD;
5004 temp = operand_subword_force (temp, word, fmode);
5005 bitpos = bitpos % BITS_PER_WORD;
5008 /* Force the intermediate word_mode (or narrower) result into a
5009 register. This avoids attempting to create paradoxical SUBREGs
5010 of floating point modes below. */
5011 temp = force_reg (imode, temp);
5013 /* If the bitpos is within the "result mode" lowpart, the operation
5014 can be implemented with a single bitwise AND. Otherwise, we need
5015 a right shift and an AND. */
5017 if (bitpos < GET_MODE_BITSIZE (rmode))
5019 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5021 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5022 temp = gen_lowpart (rmode, temp);
5023 temp = expand_binop (rmode, and_optab, temp,
5024 immed_wide_int_const (mask, rmode),
5025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5027 else
5029 /* Perform a logical right shift to place the signbit in the least
5030 significant bit, then truncate the result to the desired mode
5031 and mask just this bit. */
5032 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5035 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5038 return temp;
5041 /* Expand fork or exec calls. TARGET is the desired target of the
5042 call. EXP is the call. FN is the
5043 identifier of the actual function. IGNORE is nonzero if the
5044 value is to be ignored. */
5046 static rtx
5047 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5049 tree id, decl;
5050 tree call;
5052 /* If we are not profiling, just call the function. */
5053 if (!profile_arc_flag)
5054 return NULL_RTX;
5056 /* Otherwise call the wrapper. This should be equivalent for the rest of
5057 compiler, so the code does not diverge, and the wrapper may run the
5058 code necessary for keeping the profiling sane. */
5060 switch (DECL_FUNCTION_CODE (fn))
5062 case BUILT_IN_FORK:
5063 id = get_identifier ("__gcov_fork");
5064 break;
5066 case BUILT_IN_EXECL:
5067 id = get_identifier ("__gcov_execl");
5068 break;
5070 case BUILT_IN_EXECV:
5071 id = get_identifier ("__gcov_execv");
5072 break;
5074 case BUILT_IN_EXECLP:
5075 id = get_identifier ("__gcov_execlp");
5076 break;
5078 case BUILT_IN_EXECLE:
5079 id = get_identifier ("__gcov_execle");
5080 break;
5082 case BUILT_IN_EXECVP:
5083 id = get_identifier ("__gcov_execvp");
5084 break;
5086 case BUILT_IN_EXECVE:
5087 id = get_identifier ("__gcov_execve");
5088 break;
5090 default:
5091 gcc_unreachable ();
5094 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5095 FUNCTION_DECL, id, TREE_TYPE (fn));
5096 DECL_EXTERNAL (decl) = 1;
5097 TREE_PUBLIC (decl) = 1;
5098 DECL_ARTIFICIAL (decl) = 1;
5099 TREE_NOTHROW (decl) = 1;
5100 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5101 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5102 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5103 return expand_call (call, target, ignore);
5108 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5109 the pointer in these functions is void*, the tree optimizers may remove
5110 casts. The mode computed in expand_builtin isn't reliable either, due
5111 to __sync_bool_compare_and_swap.
5113 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5114 group of builtins. This gives us log2 of the mode size. */
5116 static inline machine_mode
5117 get_builtin_sync_mode (int fcode_diff)
5119 /* The size is not negotiable, so ask not to get BLKmode in return
5120 if the target indicates that a smaller size would be better. */
5121 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
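/* For example, on a target with 8-bit units the FOO_4 variants have
   FCODE_DIFF == 2, giving the 32-bit integer mode.  */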
5124 /* Expand the memory expression LOC and return the appropriate memory operand
5125 for the builtin_sync operations. */
5127 static rtx
5128 get_builtin_sync_mem (tree loc, machine_mode mode)
5130 rtx addr, mem;
5132 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5133 addr = convert_memory_address (Pmode, addr);
5135 /* Note that we explicitly do not want any alias information for this
5136 memory, so that we kill all other live memories. Otherwise we don't
5137 satisfy the full barrier semantics of the intrinsic. */
5138 mem = validize_mem (gen_rtx_MEM (mode, addr));
5140 /* The alignment needs to be at least that of the mode. */
5141 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5142 get_pointer_alignment (loc)));
5143 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5144 MEM_VOLATILE_P (mem) = 1;
5146 return mem;
5149 /* Make sure an argument is in the right mode.
5150 EXP is the tree argument.
5151 MODE is the mode it should be in. */
5153 static rtx
5154 expand_expr_force_mode (tree exp, machine_mode mode)
5156 rtx val;
5157 machine_mode old_mode;
5159 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (exp));
5166 val = convert_modes (mode, old_mode, val, 1);
5167 return val;
5171 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5172 EXP is the CALL_EXPR. CODE is the rtx code
5173 that corresponds to the arithmetic or logical operation from the name;
5174 an exception here is that NOT actually means NAND. TARGET is an optional
5175 place for us to store the results; AFTER is true if this is the
5176 fetch_and_xxx form. */
5178 static rtx
5179 expand_builtin_sync_operation (machine_mode mode, tree exp,
5180 enum rtx_code code, bool after,
5181 rtx target)
5183 rtx val, mem;
5184 location_t loc = EXPR_LOCATION (exp);
5186 if (code == NOT && warn_sync_nand)
5188 tree fndecl = get_callee_fndecl (exp);
5189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5191 static bool warned_f_a_n, warned_n_a_f;
5193 switch (fcode)
5195 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5198 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5199 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5200 if (warned_f_a_n)
5201 break;
5203 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5204 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5205 warned_f_a_n = true;
5206 break;
5208 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5211 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5212 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5213 if (warned_n_a_f)
5214 break;
5216 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5217 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5218 warned_n_a_f = true;
5219 break;
5221 default:
5222 gcc_unreachable ();
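/* The semantic change warned about above: since GCC 4.4 these NAND
   builtins compute ~(*PTR & VAL) rather than *PTR & ~VAL.  */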
5226 /* Expand the operands. */
5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5230 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5231 after);
5234 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5235 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5236 true if this is the boolean form. TARGET is a place for us to store the
5237 results; this is NOT optional if IS_BOOL is true. */
5239 static rtx
5240 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5241 bool is_bool, rtx target)
5243 rtx old_val, new_val, mem;
5244 rtx *pbool, *poval;
5246 /* Expand the operands. */
5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5248 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5251 pbool = poval = NULL;
5252 if (target != const0_rtx)
5254 if (is_bool)
5255 pbool = &target;
5256 else
5257 poval = &target;
5259 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5260 false, MEMMODEL_SYNC_SEQ_CST,
5261 MEMMODEL_SYNC_SEQ_CST))
5262 return NULL_RTX;
5264 return target;
5267 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5268 general form is actually an atomic exchange, and some targets only
5269 support a reduced form with the second argument being a constant 1.
5270 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5271 the results. */
5273 static rtx
5274 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5275 rtx target)
5277 rtx val, mem;
5279 /* Expand the operands. */
5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5281 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5283 return expand_sync_lock_test_and_set (target, mem, val);
5286 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5288 static void
5289 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5291 rtx mem;
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5296 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
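/* Illustrative usage sketch (editorial addition): together, the two
   builtins above implement the classic spinlock idiom:

     static volatile int lock;

     void acquire (void)
     {
       while (__sync_lock_test_and_set (&lock, 1))  // acquire barrier
         ;                                          // spin while already held
     }

     void release (void)
     {
       __sync_lock_release (&lock);                 // release barrier; stores 0
     }
*/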
5299 /* Given an integer representing an ``enum memmodel'', verify its
5300 correctness and return the memory model enum. */
5302 static enum memmodel
5303 get_memmodel (tree exp)
5305 rtx op;
5306 unsigned HOST_WIDE_INT val;
5308 /* If the parameter is not a constant, it's a run-time value, so just
5309 convert it to MEMMODEL_SEQ_CST to avoid annoying run-time checking. */
5310 if (TREE_CODE (exp) != INTEGER_CST)
5311 return MEMMODEL_SEQ_CST;
5313 op = expand_normal (exp);
5315 val = INTVAL (op);
5316 if (targetm.memmodel_check)
5317 val = targetm.memmodel_check (val);
5318 else if (val & ~MEMMODEL_MASK)
5320 warning (OPT_Winvalid_memory_model,
5321 "Unknown architecture specifier in memory model to builtin.");
5322 return MEMMODEL_SEQ_CST;
5325 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5326 if (memmodel_base (val) >= MEMMODEL_LAST)
5328 warning (OPT_Winvalid_memory_model,
5329 "invalid memory model argument to builtin");
5330 return MEMMODEL_SEQ_CST;
5333 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5334 be conservative and promote consume to acquire. */
5335 if (val == MEMMODEL_CONSUME)
5336 val = MEMMODEL_ACQUIRE;
5338 return (enum memmodel) val;
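/* Illustrative usage sketch (editorial addition): callers normally pass
   one of the __ATOMIC_* constants, e.g.

     int v, r;
     __atomic_store_n (&v, 1, __ATOMIC_RELEASE);
     r = __atomic_load_n (&v, __ATOMIC_CONSUME);  // promoted to ACQUIRE above

   A non-constant or out-of-range model argument is demoted to
   __ATOMIC_SEQ_CST by the checks above rather than rejected outright.
*/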
5341 /* Expand the __atomic_exchange intrinsic:
5342 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results. */
5346 static rtx
5347 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5349 rtx val, mem;
5350 enum memmodel model;
5352 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5357 /* Expand the operands. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5361 return expand_atomic_exchange (target, mem, val, model);
5364 /* Expand the __atomic_compare_exchange intrinsic:
5365 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5366 TYPE desired, BOOL weak,
5367 enum memmodel success,
5368 enum memmodel failure)
5369 EXP is the CALL_EXPR.
5370 TARGET is an optional place for us to store the results. */
5372 static rtx
5373 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5374 rtx target)
5376 rtx expect, desired, mem, oldval;
5377 rtx_code_label *label;
5378 enum memmodel success, failure;
5379 tree weak;
5380 bool is_weak;
5382 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5383 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5385 if (failure > success)
5387 warning (OPT_Winvalid_memory_model,
5388 "failure memory model cannot be stronger than success memory "
5389 "model for %<__atomic_compare_exchange%>");
5390 success = MEMMODEL_SEQ_CST;
5393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5395 warning (OPT_Winvalid_memory_model,
5396 "invalid failure memory model for "
5397 "%<__atomic_compare_exchange%>");
5398 failure = MEMMODEL_SEQ_CST;
5399 success = MEMMODEL_SEQ_CST;
5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5406 /* Expand the operands. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5409 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5410 expect = convert_memory_address (Pmode, expect);
5411 expect = gen_rtx_MEM (mode, expect);
5412 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5414 weak = CALL_EXPR_ARG (exp, 3);
5415 is_weak = false;
5416 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5417 is_weak = true;
5419 if (target == const0_rtx)
5420 target = NULL;
5422 /* Lest the RTL backend create a race condition with an improper store
5423 to memory, always create a new pseudo for OLDVAL. */
5424 oldval = NULL;
5426 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5427 is_weak, success, failure))
5428 return NULL_RTX;
5430 /* Conditionally store back to EXPECT, lest we create a race condition
5431 with an improper store to memory. */
5432 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5433 the normal case where EXPECT is totally private, i.e. a register. At
5434 which point the store can be unconditional. */
5435 label = gen_label_rtx ();
5436 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5437 GET_MODE (target), 1, label);
5438 emit_move_insn (expect, oldval);
5439 emit_label (label);
5441 return target;
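/* Illustrative usage sketch (editorial addition): this expander serves the
   classic compare-exchange loop.  On failure the builtin refreshes
   EXPECTED from memory, which is what the conditional store of OLDVAL
   above implements:

     int v = 0, expected = 0;
     while (!__atomic_compare_exchange_n (&v, &expected, expected + 1,
                                          1,                  // weak
                                          __ATOMIC_SEQ_CST,   // success model
                                          __ATOMIC_RELAXED))  // failure model
       ;  // on each failure, EXPECTED now holds the value actually seen in V
*/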
5444 /* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5449 static rtx
5450 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5452 rtx mem;
5453 enum memmodel model;
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5456 if (is_mm_release (model) || is_mm_acq_rel (model))
5458 warning (OPT_Winvalid_memory_model,
5459 "invalid memory model for %<__atomic_load%>");
5460 model = MEMMODEL_SEQ_CST;
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5469 return expand_atomic_load (target, mem, model);
5473 /* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR. */
5478 static rtx
5479 expand_builtin_atomic_store (machine_mode mode, tree exp)
5481 rtx mem, val;
5482 enum memmodel model;
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5485 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5486 || is_mm_release (model)))
5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_store%>");
5490 model = MEMMODEL_SEQ_CST;
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5500 return expand_atomic_store (mem, val, model, false);
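/* Illustrative usage sketch (editorial addition): model pairings accepted
   by the two expanders above:

     int v, r;
     r = __atomic_load_n (&v, __ATOMIC_ACQUIRE);  // load: not RELEASE/ACQ_REL
     __atomic_store_n (&v, r, __ATOMIC_RELEASE);  // store: RELAXED/RELEASE/SEQ_CST

   Anything else draws the -Winvalid-memory-model warning and is executed
   as __ATOMIC_SEQ_CST instead.
*/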
5503 /* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5508 FETCH_AFTER is true if we return the result of the operation, and
5509 false if we return the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5514 static rtx
5515 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5538 /* If no alternate library routine is needed, let the caller emit the
normal library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
5559 if (code == NOT)
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5569 return ret;
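/* Worked example of the trailing correction (editorial note): suppose
   __atomic_add_fetch_4 cannot be inlined and falls back to the library's
   __atomic_fetch_add_4, which returns the PRE-operation value RET.  The
   post-operation result the caller asked for is then recomputed as

     RET + VAL       for PLUS (and analogously for MINUS, AND, XOR, IOR)
     ~(RET & VAL)    for NOT, i.e. the NAND case handled above
*/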
5572 /* Expand an atomic clear operation.
5573 void __atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5576 static rtx
5577 expand_builtin_atomic_clear (tree exp)
5579 machine_mode mode;
5580 rtx mem, ret;
5581 enum memmodel model;
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5589 warning (OPT_Winvalid_memory_model,
5590 "invalid memory model for %<__atomic_store%>");
5591 model = MEMMODEL_SEQ_CST;
5594 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5595 The only way this can fail is if the bool type is larger than a word
5596 size. Unlikely, but handle it anyway for completeness: in that case
5597 assume a single threaded model, since there is no atomic support and
5598 no barriers are required, and emit a plain store below. */
5599 ret = expand_atomic_store (mem, const0_rtx, model, true);
5600 if (!ret)
5601 emit_move_insn (mem, const0_rtx);
5602 return const0_rtx;
5605 /* Expand an atomic test_and_set operation.
5606 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5607 EXP is the call expression. */
5609 static rtx
5610 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5612 rtx mem;
5613 enum memmodel model;
5614 machine_mode mode;
5616 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5620 return expand_atomic_test_and_set (target, mem, model);
5624 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5625 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5627 static tree
5628 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5630 int size;
5631 machine_mode mode;
5632 unsigned int mode_align, type_align;
5634 if (TREE_CODE (arg0) != INTEGER_CST)
5635 return NULL_TREE;
5637 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5638 mode = mode_for_size (size, MODE_INT, 0);
5639 mode_align = GET_MODE_ALIGNMENT (mode);
5641 if (TREE_CODE (arg1) == INTEGER_CST)
5643 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5645 /* Either this argument is null, or it's a fake pointer encoding
5646 the alignment of the object. */
5647 val = val & -val;
5648 val *= BITS_PER_UNIT;
5650 if (val == 0 || mode_align < val)
5651 type_align = mode_align;
5652 else
5653 type_align = val;
5655 else
5657 tree ttype = TREE_TYPE (arg1);
5659 /* This function is usually invoked and folded immediately by the front
5660 end before anything else has a chance to look at it. The pointer
5661 parameter at this point is usually cast to a void *, so check for that
5662 and look past the cast. */
5663 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5664 && VOID_TYPE_P (TREE_TYPE (ttype)))
5665 arg1 = TREE_OPERAND (arg1, 0);
5667 ttype = TREE_TYPE (arg1);
5668 gcc_assert (POINTER_TYPE_P (ttype));
5670 /* Get the underlying type of the object. */
5671 ttype = TREE_TYPE (ttype);
5672 type_align = TYPE_ALIGN (ttype);
5675 /* If the object has smaller alignment, the lock free routines cannot
5676 be used. */
5677 if (type_align < mode_align)
5678 return boolean_false_node;
5680 /* Check if a compare_and_swap pattern exists for the mode which represents
5681 the required size. The pattern is not allowed to fail, so the existence
5682 of the pattern indicates support is present. */
5683 if (can_compare_and_swap_p (mode, true))
5684 return boolean_true_node;
5685 else
5686 return boolean_false_node;
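/* Illustrative usage sketch (editorial addition): at the source level,

     extern int v;
     _Bool a = __atomic_always_lock_free (sizeof (int), 0);   // typical alignment
     _Bool b = __atomic_always_lock_free (sizeof (int), &v);  // alignment of V

   folds here to 1 or 0 whenever the size argument is a compile-time
   constant; a null object pointer selects the mode's natural alignment.
*/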
5689 /* Return true if the parameters to call EXP represent an object which will
5690 always generate lock free instructions. The first argument represents the
5691 size of the object, and the second parameter is a pointer to the object
5692 itself. If NULL is passed for the object, then the result is based on
5693 typical alignment for an object of the specified size. Otherwise return
5694 false. */
5696 static rtx
5697 expand_builtin_atomic_always_lock_free (tree exp)
5699 tree size;
5700 tree arg0 = CALL_EXPR_ARG (exp, 0);
5701 tree arg1 = CALL_EXPR_ARG (exp, 1);
5703 if (TREE_CODE (arg0) != INTEGER_CST)
5705 error ("non-constant argument 1 to __atomic_always_lock_free");
5706 return const0_rtx;
5709 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5710 if (size == boolean_true_node)
5711 return const1_rtx;
5712 return const0_rtx;
5715 /* Return boolean_true_node if it can be determined that object ARG1 of size
5716 ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5718 static tree
5719 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5721 if (!flag_inline_atomics)
5722 return NULL_TREE;
5724 /* If it isn't always lock free, don't generate a result. */
5725 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5726 return boolean_true_node;
5728 return NULL_TREE;
5731 /* Return one if it can be determined that the object described by call EXP
5732 is lock free on this architecture. The first argument is the size of the
5733 object, and the second is a pointer to the object itself. If NULL is
5734 passed for the object, then the result is based on typical alignment for
5735 an object of the specified size. Otherwise return
5736 NULL_RTX. */
5738 static rtx
5739 expand_builtin_atomic_is_lock_free (tree exp)
5741 tree size;
5742 tree arg0 = CALL_EXPR_ARG (exp, 0);
5743 tree arg1 = CALL_EXPR_ARG (exp, 1);
5745 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5747 error ("non-integer argument 1 to __atomic_is_lock_free");
5748 return NULL_RTX;
5751 if (!flag_inline_atomics)
5752 return NULL_RTX;
5754 /* If the value is known at compile time, return the RTX for it. */
5755 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5756 if (size == boolean_true_node)
5757 return const1_rtx;
5759 return NULL_RTX;
5762 /* Expand the __atomic_thread_fence intrinsic:
5763 void __atomic_thread_fence (enum memmodel)
5764 EXP is the CALL_EXPR. */
5766 static void
5767 expand_builtin_atomic_thread_fence (tree exp)
5769 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5770 expand_mem_thread_fence (model);
5773 /* Expand the __atomic_signal_fence intrinsic:
5774 void __atomic_signal_fence (enum memmodel)
5775 EXP is the CALL_EXPR. */
5777 static void
5778 expand_builtin_atomic_signal_fence (tree exp)
5780 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5781 expand_mem_signal_fence (model);
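/* Illustrative usage sketch (editorial addition):

     __atomic_thread_fence (__ATOMIC_ACQ_REL);  // orders against other threads
     __atomic_signal_fence (__ATOMIC_SEQ_CST);  // orders only against a signal
                                                // handler on the same thread

   A signal fence is a compiler-level barrier, so it typically emits no
   machine instruction.
*/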
5784 /* Expand the __sync_synchronize intrinsic. */
5786 static void
5787 expand_builtin_sync_synchronize (void)
5789 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5792 static rtx
5793 expand_builtin_thread_pointer (tree exp, rtx target)
5795 enum insn_code icode;
5796 if (!validate_arglist (exp, VOID_TYPE))
5797 return const0_rtx;
5798 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5799 if (icode != CODE_FOR_nothing)
5801 struct expand_operand op;
5802 /* If the target is not suitable, then create a new target. */
5803 if (target == NULL_RTX
5804 || !REG_P (target)
5805 || GET_MODE (target) != Pmode)
5806 target = gen_reg_rtx (Pmode);
5807 create_output_operand (&op, target, Pmode);
5808 expand_insn (icode, 1, &op);
5809 return target;
5811 error ("__builtin_thread_pointer is not supported on this target");
5812 return const0_rtx;
5815 static void
5816 expand_builtin_set_thread_pointer (tree exp)
5818 enum insn_code icode;
5819 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5820 return;
5821 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5822 if (icode != CODE_FOR_nothing)
5824 struct expand_operand op;
5825 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5826 Pmode, EXPAND_NORMAL);
5827 create_input_operand (&op, val, Pmode);
5828 expand_insn (icode, 1, &op);
5829 return;
5831 error ("__builtin_set_thread_pointer is not supported on this target");
5837 /* Emit code to restore a saved value of the stack pointer. */
5837 static void
5838 expand_stack_restore (tree var)
5840 rtx_insn *prev;
5841 rtx sa = expand_normal (var);
5843 sa = convert_memory_address (Pmode, sa);
5845 prev = get_last_insn ();
5846 emit_stack_restore (SAVE_BLOCK, sa);
5848 record_new_stack_level ();
5850 fixup_args_size_notes (prev, get_last_insn (), 0);
5855 /* Emit code to save the current value of the stack pointer. */
5855 static rtx
5856 expand_stack_save (void)
5858 rtx ret = NULL_RTX;
5860 emit_stack_save (SAVE_BLOCK, &ret);
5861 return ret;
5865 /* Expand an expression EXP that calls a built-in function,
5866 with result going to TARGET if that's convenient
5867 (and in mode MODE if that's convenient).
5868 SUBTARGET may be used as the target for computing one of EXP's operands.
5869 IGNORE is nonzero if the value is to be ignored. */
5872 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5873 int ignore)
5875 tree fndecl = get_callee_fndecl (exp);
5876 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5877 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5878 int flags;
5880 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5881 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5883 /* When ASan is enabled, we don't want to expand some memory/string
5884 builtins and rely on libsanitizer's hooks. This allows us to avoid
5885 redundant checks and be sure that a possible overflow will be detected
5886 by ASan. */
5888 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5889 return expand_call (exp, target, ignore);
5891 /* When not optimizing, generate calls to library functions for a certain
5892 set of builtins. */
5893 if (!optimize
5894 && !called_as_built_in (fndecl)
5895 && fcode != BUILT_IN_FORK
5896 && fcode != BUILT_IN_EXECL
5897 && fcode != BUILT_IN_EXECV
5898 && fcode != BUILT_IN_EXECLP
5899 && fcode != BUILT_IN_EXECLE
5900 && fcode != BUILT_IN_EXECVP
5901 && fcode != BUILT_IN_EXECVE
5902 && fcode != BUILT_IN_ALLOCA
5903 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5904 && fcode != BUILT_IN_FREE
5905 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5906 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5907 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5908 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5909 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5910 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5911 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5912 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5913 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5914 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5915 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5916 && fcode != BUILT_IN_CHKP_BNDRET)
5917 return expand_call (exp, target, ignore);
5919 /* The built-in function expanders test for target == const0_rtx
5920 to determine whether the function's result will be ignored. */
5921 if (ignore)
5922 target = const0_rtx;
5924 /* If the result of a pure or const built-in function is ignored, and
5925 none of its arguments are volatile, we can avoid expanding the
5926 built-in call and just evaluate the arguments for side-effects. */
5927 if (target == const0_rtx
5928 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5929 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5931 bool volatilep = false;
5932 tree arg;
5933 call_expr_arg_iterator iter;
5935 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5936 if (TREE_THIS_VOLATILE (arg))
5938 volatilep = true;
5939 break;
5942 if (! volatilep)
5944 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5945 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5946 return const0_rtx;
5950 /* expand_builtin_with_bounds is supposed to be used for
5951 instrumented builtin calls. */
5952 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5954 switch (fcode)
5956 CASE_FLT_FN (BUILT_IN_FABS):
5957 case BUILT_IN_FABSD32:
5958 case BUILT_IN_FABSD64:
5959 case BUILT_IN_FABSD128:
5960 target = expand_builtin_fabs (exp, target, subtarget);
5961 if (target)
5962 return target;
5963 break;
5965 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5966 target = expand_builtin_copysign (exp, target, subtarget);
5967 if (target)
5968 return target;
5969 break;
5971 /* Just do a normal library call if we were unable to fold
5972 the values. */
5973 CASE_FLT_FN (BUILT_IN_CABS):
5974 break;
5976 CASE_FLT_FN (BUILT_IN_EXP):
5977 CASE_FLT_FN (BUILT_IN_EXP10):
5978 CASE_FLT_FN (BUILT_IN_POW10):
5979 CASE_FLT_FN (BUILT_IN_EXP2):
5980 CASE_FLT_FN (BUILT_IN_EXPM1):
5981 CASE_FLT_FN (BUILT_IN_LOGB):
5982 CASE_FLT_FN (BUILT_IN_LOG):
5983 CASE_FLT_FN (BUILT_IN_LOG10):
5984 CASE_FLT_FN (BUILT_IN_LOG2):
5985 CASE_FLT_FN (BUILT_IN_LOG1P):
5986 CASE_FLT_FN (BUILT_IN_TAN):
5987 CASE_FLT_FN (BUILT_IN_ASIN):
5988 CASE_FLT_FN (BUILT_IN_ACOS):
5989 CASE_FLT_FN (BUILT_IN_ATAN):
5990 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5991 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5992 because of possible accuracy problems. */
5993 if (! flag_unsafe_math_optimizations)
5994 break;
5995 CASE_FLT_FN (BUILT_IN_SQRT):
5996 CASE_FLT_FN (BUILT_IN_FLOOR):
5997 CASE_FLT_FN (BUILT_IN_CEIL):
5998 CASE_FLT_FN (BUILT_IN_TRUNC):
5999 CASE_FLT_FN (BUILT_IN_ROUND):
6000 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6001 CASE_FLT_FN (BUILT_IN_RINT):
6002 target = expand_builtin_mathfn (exp, target, subtarget);
6003 if (target)
6004 return target;
6005 break;
6007 CASE_FLT_FN (BUILT_IN_FMA):
6008 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6009 if (target)
6010 return target;
6011 break;
6013 CASE_FLT_FN (BUILT_IN_ILOGB):
6014 if (! flag_unsafe_math_optimizations)
6015 break;
6016 CASE_FLT_FN (BUILT_IN_ISINF):
6017 CASE_FLT_FN (BUILT_IN_FINITE):
6018 case BUILT_IN_ISFINITE:
6019 case BUILT_IN_ISNORMAL:
6020 target = expand_builtin_interclass_mathfn (exp, target);
6021 if (target)
6022 return target;
6023 break;
6025 CASE_FLT_FN (BUILT_IN_ICEIL):
6026 CASE_FLT_FN (BUILT_IN_LCEIL):
6027 CASE_FLT_FN (BUILT_IN_LLCEIL):
6028 CASE_FLT_FN (BUILT_IN_LFLOOR):
6029 CASE_FLT_FN (BUILT_IN_IFLOOR):
6030 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6031 target = expand_builtin_int_roundingfn (exp, target);
6032 if (target)
6033 return target;
6034 break;
6036 CASE_FLT_FN (BUILT_IN_IRINT):
6037 CASE_FLT_FN (BUILT_IN_LRINT):
6038 CASE_FLT_FN (BUILT_IN_LLRINT):
6039 CASE_FLT_FN (BUILT_IN_IROUND):
6040 CASE_FLT_FN (BUILT_IN_LROUND):
6041 CASE_FLT_FN (BUILT_IN_LLROUND):
6042 target = expand_builtin_int_roundingfn_2 (exp, target);
6043 if (target)
6044 return target;
6045 break;
6047 CASE_FLT_FN (BUILT_IN_POWI):
6048 target = expand_builtin_powi (exp, target);
6049 if (target)
6050 return target;
6051 break;
6053 CASE_FLT_FN (BUILT_IN_ATAN2):
6054 CASE_FLT_FN (BUILT_IN_LDEXP):
6055 CASE_FLT_FN (BUILT_IN_SCALB):
6056 CASE_FLT_FN (BUILT_IN_SCALBN):
6057 CASE_FLT_FN (BUILT_IN_SCALBLN):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
6061 CASE_FLT_FN (BUILT_IN_FMOD):
6062 CASE_FLT_FN (BUILT_IN_REMAINDER):
6063 CASE_FLT_FN (BUILT_IN_DREM):
6064 CASE_FLT_FN (BUILT_IN_POW):
6065 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6066 if (target)
6067 return target;
6068 break;
6070 CASE_FLT_FN (BUILT_IN_CEXPI):
6071 target = expand_builtin_cexpi (exp, target);
6072 gcc_assert (target);
6073 return target;
6075 CASE_FLT_FN (BUILT_IN_SIN):
6076 CASE_FLT_FN (BUILT_IN_COS):
6077 if (! flag_unsafe_math_optimizations)
6078 break;
6079 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6080 if (target)
6081 return target;
6082 break;
6084 CASE_FLT_FN (BUILT_IN_SINCOS):
6085 if (! flag_unsafe_math_optimizations)
6086 break;
6087 target = expand_builtin_sincos (exp);
6088 if (target)
6089 return target;
6090 break;
6092 case BUILT_IN_APPLY_ARGS:
6093 return expand_builtin_apply_args ();
6095 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6096 FUNCTION with a copy of the parameters described by
6097 ARGUMENTS, and ARGSIZE. It returns a block of memory
6098 allocated on the stack into which is stored all the registers
6099 that might possibly be used for returning the result of a
6100 function. ARGUMENTS is the value returned by
6101 __builtin_apply_args. ARGSIZE is the number of bytes of
6102 arguments that must be copied. ??? How should this value be
6103 computed? We'll also need a safe worst case value for varargs
6104 functions. */
6105 case BUILT_IN_APPLY:
6106 if (!validate_arglist (exp, POINTER_TYPE,
6107 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6108 && !validate_arglist (exp, REFERENCE_TYPE,
6109 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6110 return const0_rtx;
6111 else
6113 rtx ops[3];
6115 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6116 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6117 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6119 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6122 /* __builtin_return (RESULT) causes the function to return the
6123 value described by RESULT. RESULT is address of the block of
6124 memory returned by __builtin_apply. */
6125 case BUILT_IN_RETURN:
6126 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6127 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6128 return const0_rtx;
6130 case BUILT_IN_SAVEREGS:
6131 return expand_builtin_saveregs ();
6133 case BUILT_IN_VA_ARG_PACK:
6134 /* All valid uses of __builtin_va_arg_pack () are removed during
6135 inlining. */
6136 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6137 return const0_rtx;
6139 case BUILT_IN_VA_ARG_PACK_LEN:
6140 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6141 inlining. */
6142 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6143 return const0_rtx;
6145 /* Return the address of the first anonymous stack arg. */
6146 case BUILT_IN_NEXT_ARG:
6147 if (fold_builtin_next_arg (exp, false))
6148 return const0_rtx;
6149 return expand_builtin_next_arg ();
6151 case BUILT_IN_CLEAR_CACHE:
6152 target = expand_builtin___clear_cache (exp);
6153 if (target)
6154 return target;
6155 break;
6157 case BUILT_IN_CLASSIFY_TYPE:
6158 return expand_builtin_classify_type (exp);
6160 case BUILT_IN_CONSTANT_P:
6161 return const0_rtx;
6163 case BUILT_IN_FRAME_ADDRESS:
6164 case BUILT_IN_RETURN_ADDRESS:
6165 return expand_builtin_frame_address (fndecl, exp);
6167 /* Returns the address of the area where the structure is returned.
6168 0 otherwise. */
6169 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6170 if (call_expr_nargs (exp) != 0
6171 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6172 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6173 return const0_rtx;
6174 else
6175 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6177 case BUILT_IN_ALLOCA:
6178 case BUILT_IN_ALLOCA_WITH_ALIGN:
6179 /* If the allocation stems from the declaration of a variable-sized
6180 object, it cannot accumulate. */
6181 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6182 if (target)
6183 return target;
6184 break;
6186 case BUILT_IN_STACK_SAVE:
6187 return expand_stack_save ();
6189 case BUILT_IN_STACK_RESTORE:
6190 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6191 return const0_rtx;
6193 case BUILT_IN_BSWAP16:
6194 case BUILT_IN_BSWAP32:
6195 case BUILT_IN_BSWAP64:
6196 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6197 if (target)
6198 return target;
6199 break;
6201 CASE_INT_FN (BUILT_IN_FFS):
6202 target = expand_builtin_unop (target_mode, exp, target,
6203 subtarget, ffs_optab);
6204 if (target)
6205 return target;
6206 break;
6208 CASE_INT_FN (BUILT_IN_CLZ):
6209 target = expand_builtin_unop (target_mode, exp, target,
6210 subtarget, clz_optab);
6211 if (target)
6212 return target;
6213 break;
6215 CASE_INT_FN (BUILT_IN_CTZ):
6216 target = expand_builtin_unop (target_mode, exp, target,
6217 subtarget, ctz_optab);
6218 if (target)
6219 return target;
6220 break;
6222 CASE_INT_FN (BUILT_IN_CLRSB):
6223 target = expand_builtin_unop (target_mode, exp, target,
6224 subtarget, clrsb_optab);
6225 if (target)
6226 return target;
6227 break;
6229 CASE_INT_FN (BUILT_IN_POPCOUNT):
6230 target = expand_builtin_unop (target_mode, exp, target,
6231 subtarget, popcount_optab);
6232 if (target)
6233 return target;
6234 break;
6236 CASE_INT_FN (BUILT_IN_PARITY):
6237 target = expand_builtin_unop (target_mode, exp, target,
6238 subtarget, parity_optab);
6239 if (target)
6240 return target;
6241 break;
6243 case BUILT_IN_STRLEN:
6244 target = expand_builtin_strlen (exp, target, target_mode);
6245 if (target)
6246 return target;
6247 break;
6249 case BUILT_IN_STRCPY:
6250 target = expand_builtin_strcpy (exp, target);
6251 if (target)
6252 return target;
6253 break;
6255 case BUILT_IN_STRNCPY:
6256 target = expand_builtin_strncpy (exp, target);
6257 if (target)
6258 return target;
6259 break;
6261 case BUILT_IN_STPCPY:
6262 target = expand_builtin_stpcpy (exp, target, mode);
6263 if (target)
6264 return target;
6265 break;
6267 case BUILT_IN_MEMCPY:
6268 target = expand_builtin_memcpy (exp, target);
6269 if (target)
6270 return target;
6271 break;
6273 case BUILT_IN_MEMPCPY:
6274 target = expand_builtin_mempcpy (exp, target, mode);
6275 if (target)
6276 return target;
6277 break;
6279 case BUILT_IN_MEMSET:
6280 target = expand_builtin_memset (exp, target, mode);
6281 if (target)
6282 return target;
6283 break;
6285 case BUILT_IN_BZERO:
6286 target = expand_builtin_bzero (exp);
6287 if (target)
6288 return target;
6289 break;
6291 case BUILT_IN_STRCMP:
6292 target = expand_builtin_strcmp (exp, target);
6293 if (target)
6294 return target;
6295 break;
6297 case BUILT_IN_STRNCMP:
6298 target = expand_builtin_strncmp (exp, target, mode);
6299 if (target)
6300 return target;
6301 break;
6303 case BUILT_IN_BCMP:
6304 case BUILT_IN_MEMCMP:
6305 target = expand_builtin_memcmp (exp, target);
6306 if (target)
6307 return target;
6308 break;
6310 case BUILT_IN_SETJMP:
6311 /* This should have been lowered to the builtins below. */
6312 gcc_unreachable ();
6314 case BUILT_IN_SETJMP_SETUP:
6315 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6316 and the receiver label. */
6317 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6319 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6320 VOIDmode, EXPAND_NORMAL);
6321 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6322 rtx_insn *label_r = label_rtx (label);
6324 /* This is copied from the handling of non-local gotos. */
6325 expand_builtin_setjmp_setup (buf_addr, label_r);
6326 nonlocal_goto_handler_labels
6327 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6328 nonlocal_goto_handler_labels);
6329 /* ??? Do not let expand_label treat us as such since we would
6330 not want to be both on the list of non-local labels and on
6331 the list of forced labels. */
6332 FORCED_LABEL (label) = 0;
6333 return const0_rtx;
6335 break;
6337 case BUILT_IN_SETJMP_RECEIVER:
6338 /* __builtin_setjmp_receiver is passed the receiver label. */
6339 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6341 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6342 rtx_insn *label_r = label_rtx (label);
6344 expand_builtin_setjmp_receiver (label_r);
6345 return const0_rtx;
6347 break;
6349 /* __builtin_longjmp is passed a pointer to an array of five words.
6350 It's similar to the C library longjmp function but works with
6351 __builtin_setjmp above. */
6352 case BUILT_IN_LONGJMP:
6353 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6355 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6356 VOIDmode, EXPAND_NORMAL);
6357 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6359 if (value != const1_rtx)
6361 error ("%<__builtin_longjmp%> second argument must be 1");
6362 return const0_rtx;
6365 expand_builtin_longjmp (buf_addr, value);
6366 return const0_rtx;
6368 break;
6370 case BUILT_IN_NONLOCAL_GOTO:
6371 target = expand_builtin_nonlocal_goto (exp);
6372 if (target)
6373 return target;
6374 break;
6376 /* This updates the setjmp buffer that is its argument with the value
6377 of the current stack pointer. */
6378 case BUILT_IN_UPDATE_SETJMP_BUF:
6379 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6381 rtx buf_addr
6382 = expand_normal (CALL_EXPR_ARG (exp, 0));
6384 expand_builtin_update_setjmp_buf (buf_addr);
6385 return const0_rtx;
6387 break;
6389 case BUILT_IN_TRAP:
6390 expand_builtin_trap ();
6391 return const0_rtx;
6393 case BUILT_IN_UNREACHABLE:
6394 expand_builtin_unreachable ();
6395 return const0_rtx;
6397 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6398 case BUILT_IN_SIGNBITD32:
6399 case BUILT_IN_SIGNBITD64:
6400 case BUILT_IN_SIGNBITD128:
6401 target = expand_builtin_signbit (exp, target);
6402 if (target)
6403 return target;
6404 break;
6406 /* Various hooks for the DWARF 2 __throw routine. */
6407 case BUILT_IN_UNWIND_INIT:
6408 expand_builtin_unwind_init ();
6409 return const0_rtx;
6410 case BUILT_IN_DWARF_CFA:
6411 return virtual_cfa_rtx;
6412 #ifdef DWARF2_UNWIND_INFO
6413 case BUILT_IN_DWARF_SP_COLUMN:
6414 return expand_builtin_dwarf_sp_column ();
6415 case BUILT_IN_INIT_DWARF_REG_SIZES:
6416 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6417 return const0_rtx;
6418 #endif
6419 case BUILT_IN_FROB_RETURN_ADDR:
6420 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6421 case BUILT_IN_EXTRACT_RETURN_ADDR:
6422 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6423 case BUILT_IN_EH_RETURN:
6424 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6425 CALL_EXPR_ARG (exp, 1));
6426 return const0_rtx;
6427 case BUILT_IN_EH_RETURN_DATA_REGNO:
6428 return expand_builtin_eh_return_data_regno (exp);
6429 case BUILT_IN_EXTEND_POINTER:
6430 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6431 case BUILT_IN_EH_POINTER:
6432 return expand_builtin_eh_pointer (exp);
6433 case BUILT_IN_EH_FILTER:
6434 return expand_builtin_eh_filter (exp);
6435 case BUILT_IN_EH_COPY_VALUES:
6436 return expand_builtin_eh_copy_values (exp);
6438 case BUILT_IN_VA_START:
6439 return expand_builtin_va_start (exp);
6440 case BUILT_IN_VA_END:
6441 return expand_builtin_va_end (exp);
6442 case BUILT_IN_VA_COPY:
6443 return expand_builtin_va_copy (exp);
6444 case BUILT_IN_EXPECT:
6445 return expand_builtin_expect (exp, target);
6446 case BUILT_IN_ASSUME_ALIGNED:
6447 return expand_builtin_assume_aligned (exp, target);
6448 case BUILT_IN_PREFETCH:
6449 expand_builtin_prefetch (exp);
6450 return const0_rtx;
6452 case BUILT_IN_INIT_TRAMPOLINE:
6453 return expand_builtin_init_trampoline (exp, true);
6454 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6455 return expand_builtin_init_trampoline (exp, false);
6456 case BUILT_IN_ADJUST_TRAMPOLINE:
6457 return expand_builtin_adjust_trampoline (exp);
6459 case BUILT_IN_FORK:
6460 case BUILT_IN_EXECL:
6461 case BUILT_IN_EXECV:
6462 case BUILT_IN_EXECLP:
6463 case BUILT_IN_EXECLE:
6464 case BUILT_IN_EXECVP:
6465 case BUILT_IN_EXECVE:
6466 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6467 if (target)
6468 return target;
6469 break;
6471 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6472 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6473 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6474 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6475 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6477 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6483 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6484 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6485 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6486 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6487 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6488 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6489 if (target)
6490 return target;
6491 break;
6493 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6494 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6495 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6496 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6497 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6498 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6499 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6500 if (target)
6501 return target;
6502 break;
6504 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6505 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6506 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6507 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6508 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6509 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6510 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6511 if (target)
6512 return target;
6513 break;
6515 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6516 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6517 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6518 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6519 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6521 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6522 if (target)
6523 return target;
6524 break;
6526 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6527 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6528 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6529 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6530 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6532 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6533 if (target)
6534 return target;
6535 break;
6537 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6538 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6539 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6540 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6541 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6543 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6544 if (target)
6545 return target;
6546 break;
6548 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6549 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6550 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6551 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6552 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6554 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6555 if (target)
6556 return target;
6557 break;
6559 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6560 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6561 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6562 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6563 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6565 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6566 if (target)
6567 return target;
6568 break;
6570 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6571 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6572 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6573 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6574 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6576 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6577 if (target)
6578 return target;
6579 break;
6581 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6582 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6583 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6584 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6585 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6587 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6588 if (target)
6589 return target;
6590 break;
6592 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6593 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6594 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6595 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6596 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6598 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6599 if (target)
6600 return target;
6601 break;
6603 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6604 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6605 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6606 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6607 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6608 if (mode == VOIDmode)
6609 mode = TYPE_MODE (boolean_type_node);
6610 if (!target || !register_operand (target, mode))
6611 target = gen_reg_rtx (mode);
6613 mode = get_builtin_sync_mode
6614 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6615 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6616 if (target)
6617 return target;
6618 break;
6620 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6621 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6622 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6623 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6624 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6625 mode = get_builtin_sync_mode
6626 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6627 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6628 if (target)
6629 return target;
6630 break;
6632 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6633 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6634 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6635 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6636 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6637 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6638 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6639 if (target)
6640 return target;
6641 break;
6643 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6644 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6645 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6646 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6647 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6648 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6649 expand_builtin_sync_lock_release (mode, exp);
6650 return const0_rtx;
6652 case BUILT_IN_SYNC_SYNCHRONIZE:
6653 expand_builtin_sync_synchronize ();
6654 return const0_rtx;
6656 case BUILT_IN_ATOMIC_EXCHANGE_1:
6657 case BUILT_IN_ATOMIC_EXCHANGE_2:
6658 case BUILT_IN_ATOMIC_EXCHANGE_4:
6659 case BUILT_IN_ATOMIC_EXCHANGE_8:
6660 case BUILT_IN_ATOMIC_EXCHANGE_16:
6661 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6662 target = expand_builtin_atomic_exchange (mode, exp, target);
6663 if (target)
6664 return target;
6665 break;
6667 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6668 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6669 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6670 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6671 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6673 unsigned int nargs, z;
6674 vec<tree, va_gc> *vec;
6676 mode =
6677 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6678 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6679 if (target)
6680 return target;
6682 /* If this is turned into an external library call, the weak parameter
6683 must be dropped to match the expected parameter list. */
6684 nargs = call_expr_nargs (exp);
6685 vec_alloc (vec, nargs - 1);
6686 for (z = 0; z < 3; z++)
6687 vec->quick_push (CALL_EXPR_ARG (exp, z));
6688 /* Skip the boolean weak parameter. */
6689 for (z = 4; z < 6; z++)
6690 vec->quick_push (CALL_EXPR_ARG (exp, z));
6691 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6692 break;
6695 case BUILT_IN_ATOMIC_LOAD_1:
6696 case BUILT_IN_ATOMIC_LOAD_2:
6697 case BUILT_IN_ATOMIC_LOAD_4:
6698 case BUILT_IN_ATOMIC_LOAD_8:
6699 case BUILT_IN_ATOMIC_LOAD_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6701 target = expand_builtin_atomic_load (mode, exp, target);
6702 if (target)
6703 return target;
6704 break;
6706 case BUILT_IN_ATOMIC_STORE_1:
6707 case BUILT_IN_ATOMIC_STORE_2:
6708 case BUILT_IN_ATOMIC_STORE_4:
6709 case BUILT_IN_ATOMIC_STORE_8:
6710 case BUILT_IN_ATOMIC_STORE_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6712 target = expand_builtin_atomic_store (mode, exp);
6713 if (target)
6714 return const0_rtx;
6715 break;
6717 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6718 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6719 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6720 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6721 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6723 enum built_in_function lib;
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6725 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6726 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6727 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6728 ignore, lib);
6729 if (target)
6730 return target;
6731 break;
6733 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6734 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6735 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6736 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6737 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6739 enum built_in_function lib;
6740 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6741 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6742 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6743 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6744 ignore, lib);
6745 if (target)
6746 return target;
6747 break;
6749 case BUILT_IN_ATOMIC_AND_FETCH_1:
6750 case BUILT_IN_ATOMIC_AND_FETCH_2:
6751 case BUILT_IN_ATOMIC_AND_FETCH_4:
6752 case BUILT_IN_ATOMIC_AND_FETCH_8:
6753 case BUILT_IN_ATOMIC_AND_FETCH_16:
6755 enum built_in_function lib;
6756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6757 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6758 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6759 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6760 ignore, lib);
6761 if (target)
6762 return target;
6763 break;
6765 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6766 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6767 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6768 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6769 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6771 enum built_in_function lib;
6772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6773 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6774 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6775 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6776 ignore, lib);
6777 if (target)
6778 return target;
6779 break;
6781 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6782 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6783 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6784 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6785 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6787 enum built_in_function lib;
6788 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6789 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6790 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6792 ignore, lib);
6793 if (target)
6794 return target;
6795 break;
6797 case BUILT_IN_ATOMIC_OR_FETCH_1:
6798 case BUILT_IN_ATOMIC_OR_FETCH_2:
6799 case BUILT_IN_ATOMIC_OR_FETCH_4:
6800 case BUILT_IN_ATOMIC_OR_FETCH_8:
6801 case BUILT_IN_ATOMIC_OR_FETCH_16:
6803 enum built_in_function lib;
6804 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6805 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6806 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6807 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6808 ignore, lib);
6809 if (target)
6810 return target;
6811 break;
6813 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6814 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6815 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6816 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6817 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6818 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6820 ignore, BUILT_IN_NONE);
6821 if (target)
6822 return target;
6823 break;
6825 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6826 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6827 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6828 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6829 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6830 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6831 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6832 ignore, BUILT_IN_NONE);
6833 if (target)
6834 return target;
6835 break;
6837 case BUILT_IN_ATOMIC_FETCH_AND_1:
6838 case BUILT_IN_ATOMIC_FETCH_AND_2:
6839 case BUILT_IN_ATOMIC_FETCH_AND_4:
6840 case BUILT_IN_ATOMIC_FETCH_AND_8:
6841 case BUILT_IN_ATOMIC_FETCH_AND_16:
6842 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6843 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6844 ignore, BUILT_IN_NONE);
6845 if (target)
6846 return target;
6847 break;
6849 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6850 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6851 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6852 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6853 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6855 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6856 ignore, BUILT_IN_NONE);
6857 if (target)
6858 return target;
6859 break;
6861 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6862 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6863 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6864 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6865 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6866 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6867 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6868 ignore, BUILT_IN_NONE);
6869 if (target)
6870 return target;
6871 break;
6873 case BUILT_IN_ATOMIC_FETCH_OR_1:
6874 case BUILT_IN_ATOMIC_FETCH_OR_2:
6875 case BUILT_IN_ATOMIC_FETCH_OR_4:
6876 case BUILT_IN_ATOMIC_FETCH_OR_8:
6877 case BUILT_IN_ATOMIC_FETCH_OR_16:
6878 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6879 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6880 ignore, BUILT_IN_NONE);
6881 if (target)
6882 return target;
6883 break;
6885 case BUILT_IN_ATOMIC_TEST_AND_SET:
6886 return expand_builtin_atomic_test_and_set (exp, target);
6888 case BUILT_IN_ATOMIC_CLEAR:
6889 return expand_builtin_atomic_clear (exp);
6891 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6892 return expand_builtin_atomic_always_lock_free (exp);
6894 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6895 target = expand_builtin_atomic_is_lock_free (exp);
6896 if (target)
6897 return target;
6898 break;
6900 case BUILT_IN_ATOMIC_THREAD_FENCE:
6901 expand_builtin_atomic_thread_fence (exp);
6902 return const0_rtx;
6904 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6905 expand_builtin_atomic_signal_fence (exp);
6906 return const0_rtx;
6908 case BUILT_IN_OBJECT_SIZE:
6909 return expand_builtin_object_size (exp);
6911 case BUILT_IN_MEMCPY_CHK:
6912 case BUILT_IN_MEMPCPY_CHK:
6913 case BUILT_IN_MEMMOVE_CHK:
6914 case BUILT_IN_MEMSET_CHK:
6915 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6916 if (target)
6917 return target;
6918 break;
6920 case BUILT_IN_STRCPY_CHK:
6921 case BUILT_IN_STPCPY_CHK:
6922 case BUILT_IN_STRNCPY_CHK:
6923 case BUILT_IN_STPNCPY_CHK:
6924 case BUILT_IN_STRCAT_CHK:
6925 case BUILT_IN_STRNCAT_CHK:
6926 case BUILT_IN_SNPRINTF_CHK:
6927 case BUILT_IN_VSNPRINTF_CHK:
6928 maybe_emit_chk_warning (exp, fcode);
6929 break;
6931 case BUILT_IN_SPRINTF_CHK:
6932 case BUILT_IN_VSPRINTF_CHK:
6933 maybe_emit_sprintf_chk_warning (exp, fcode);
6934 break;
6936 case BUILT_IN_FREE:
6937 if (warn_free_nonheap_object)
6938 maybe_emit_free_warning (exp);
6939 break;
6941 case BUILT_IN_THREAD_POINTER:
6942 return expand_builtin_thread_pointer (exp, target);
6944 case BUILT_IN_SET_THREAD_POINTER:
6945 expand_builtin_set_thread_pointer (exp);
6946 return const0_rtx;
6948 case BUILT_IN_CILK_DETACH:
6949 expand_builtin_cilk_detach (exp);
6950 return const0_rtx;
6952 case BUILT_IN_CILK_POP_FRAME:
6953 expand_builtin_cilk_pop_frame (exp);
6954 return const0_rtx;
6956 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6957 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6958 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6959 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6960 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6961 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6962 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6963 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6964 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6965 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6966 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6967 /* We allow user CHKP builtins if the Pointer Bounds
6968 Checker is off. */
6969 if (!chkp_function_instrumented_p (current_function_decl))
6971 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6972 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6973 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6974 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6975 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6976 return expand_normal (CALL_EXPR_ARG (exp, 0));
6977 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6978 return expand_normal (size_zero_node);
6979 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6980 return expand_normal (size_int (-1));
6981 else
6982 return const0_rtx;
6984 /* FALLTHROUGH */
6986 case BUILT_IN_CHKP_BNDMK:
6987 case BUILT_IN_CHKP_BNDSTX:
6988 case BUILT_IN_CHKP_BNDCL:
6989 case BUILT_IN_CHKP_BNDCU:
6990 case BUILT_IN_CHKP_BNDLDX:
6991 case BUILT_IN_CHKP_BNDRET:
6992 case BUILT_IN_CHKP_INTERSECT:
6993 case BUILT_IN_CHKP_NARROW:
6994 case BUILT_IN_CHKP_EXTRACT_LOWER:
6995 case BUILT_IN_CHKP_EXTRACT_UPPER:
6996 /* Software implementation of Pointer Bounds Checker is NYI.
6997 Target support is required. */
6998 error ("Your target platform does not support -fcheck-pointer-bounds");
6999 break;
7001 case BUILT_IN_ACC_ON_DEVICE:
7002 /* Do a library call if we failed to expand the builtin when
7003 folding. */
7004 break;
7006 default: /* Just do a library call for an unknown builtin. */
7007 break;
7010 /* The switch statement above can drop through to cause the function
7011 to be called normally. */
7012 return expand_call (exp, target, ignore);
7015 /* Similar to expand_builtin but is used for instrumented calls. */
7018 expand_builtin_with_bounds (tree exp, rtx target,
7019 rtx subtarget ATTRIBUTE_UNUSED,
7020 machine_mode mode, int ignore)
7022 tree fndecl = get_callee_fndecl (exp);
7023 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7025 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7027 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7028 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7030 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7031 && fcode < END_CHKP_BUILTINS);
7033 switch (fcode)
7035 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7036 target = expand_builtin_memcpy_with_bounds (exp, target);
7037 if (target)
7038 return target;
7039 break;
7041 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7042 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7043 if (target)
7044 return target;
7045 break;
7047 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7048 target = expand_builtin_memset_with_bounds (exp, target, mode);
7049 if (target)
7050 return target;
7051 break;
7053 default:
7054 break;
7057 /* The switch statement above can drop through to cause the function
7058 to be called normally. */
7059 return expand_call (exp, target, ignore);
7062 /* Determine whether a tree node represents a call to a built-in
7063 function. If the tree T is a call to a built-in function with
7064 the right number of arguments of the appropriate types, return
7065 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7066 Otherwise the return value is END_BUILTINS. */
7068 enum built_in_function
7069 builtin_mathfn_code (const_tree t)
7071 const_tree fndecl, arg, parmlist;
7072 const_tree argtype, parmtype;
7073 const_call_expr_arg_iterator iter;
7075 if (TREE_CODE (t) != CALL_EXPR
7076 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7077 return END_BUILTINS;
7079 fndecl = get_callee_fndecl (t);
7080 if (fndecl == NULL_TREE
7081 || TREE_CODE (fndecl) != FUNCTION_DECL
7082 || ! DECL_BUILT_IN (fndecl)
7083 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7084 return END_BUILTINS;
7086 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7087 init_const_call_expr_arg_iterator (t, &iter);
7088 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7090 /* If a function doesn't take a variable number of arguments,
7091 the last element in the list will have type `void'. */
7092 parmtype = TREE_VALUE (parmlist);
7093 if (VOID_TYPE_P (parmtype))
7095 if (more_const_call_expr_args_p (&iter))
7096 return END_BUILTINS;
7097 return DECL_FUNCTION_CODE (fndecl);
7100 if (! more_const_call_expr_args_p (&iter))
7101 return END_BUILTINS;
7103 arg = next_const_call_expr_arg (&iter);
7104 argtype = TREE_TYPE (arg);
7106 if (SCALAR_FLOAT_TYPE_P (parmtype))
7108 if (! SCALAR_FLOAT_TYPE_P (argtype))
7109 return END_BUILTINS;
7111 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7113 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7114 return END_BUILTINS;
7116 else if (POINTER_TYPE_P (parmtype))
7118 if (! POINTER_TYPE_P (argtype))
7119 return END_BUILTINS;
7121 else if (INTEGRAL_TYPE_P (parmtype))
7123 if (! INTEGRAL_TYPE_P (argtype))
7124 return END_BUILTINS;
7126 else
7127 return END_BUILTINS;
7130 /* Variable-length argument list. */
7131 return DECL_FUNCTION_CODE (fndecl);
7134 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7135 evaluate to a constant. */
7137 static tree
7138 fold_builtin_constant_p (tree arg)
7140 /* We return 1 for a numeric type that's known to be a constant
7141 value at compile-time or for an aggregate type that's a
7142 literal constant. */
7143 STRIP_NOPS (arg);
7145 /* If we know this is a constant, return the constant one. */
7146 if (CONSTANT_CLASS_P (arg)
7147 || (TREE_CODE (arg) == CONSTRUCTOR
7148 && TREE_CONSTANT (arg)))
7149 return integer_one_node;
7150 if (TREE_CODE (arg) == ADDR_EXPR)
7152 tree op = TREE_OPERAND (arg, 0);
7153 if (TREE_CODE (op) == STRING_CST
7154 || (TREE_CODE (op) == ARRAY_REF
7155 && integer_zerop (TREE_OPERAND (op, 1))
7156 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7157 return integer_one_node;
7160 /* If this expression has side effects, show we don't know it to be a
7161 constant. Likewise if it's a pointer or aggregate type, since in
7162 those cases we only want literals; those are only optimized
7163 when generating RTL, not later.
7164 And finally, if we are compiling an initializer, not code, we
7165 need to return a definite result now; there's not going to be any
7166 more optimization done. */
7167 if (TREE_SIDE_EFFECTS (arg)
7168 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7169 || POINTER_TYPE_P (TREE_TYPE (arg))
7170 || cfun == 0
7171 || folding_initializer
7172 || force_folding_builtin_constant_p)
7173 return integer_zero_node;
7175 return NULL_TREE;
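/* Illustrative example, added for exposition (not part of the original
   source).  Given the folds above:

     __builtin_constant_p (42)     -> 1  (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1  (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0  (side effects)
     __builtin_constant_p (x)      -> NULL_TREE for a plain local X,
                                      deferring to later passes.  */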
7178 /* Create builtin_expect with PRED, EXPECTED and optionally PREDICTOR
7179 as its arguments and return it as a truthvalue. */
7181 static tree
7182 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7183 tree predictor)
7185 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7187 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7188 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7189 ret_type = TREE_TYPE (TREE_TYPE (fn));
7190 pred_type = TREE_VALUE (arg_types);
7191 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7193 pred = fold_convert_loc (loc, pred_type, pred);
7194 expected = fold_convert_loc (loc, expected_type, expected);
7195 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7196 predictor);
7198 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7199 build_int_cst (ret_type, 0));
7202 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and an
7203 optional predictor hint ARG2. Return NULL_TREE if no
7204 simplification is possible. */
7205 tree
7206 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7208 tree inner, fndecl, inner_arg0;
7209 enum tree_code code;
7211 /* Distribute the expected value over short-circuiting operators.
7212 See through the cast from truthvalue_type_node to long. */
7213 inner_arg0 = arg0;
7214 while (CONVERT_EXPR_P (inner_arg0)
7215 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7216 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7217 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7219 /* If this is a builtin_expect within a builtin_expect, keep the
7220 inner one. See through a comparison against a constant. It
7221 might have been added to create a truthvalue. */
7222 inner = inner_arg0;
7224 if (COMPARISON_CLASS_P (inner)
7225 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7226 inner = TREE_OPERAND (inner, 0);
7228 if (TREE_CODE (inner) == CALL_EXPR
7229 && (fndecl = get_callee_fndecl (inner))
7230 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7231 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7232 return arg0;
7234 inner = inner_arg0;
7235 code = TREE_CODE (inner);
7236 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7238 tree op0 = TREE_OPERAND (inner, 0);
7239 tree op1 = TREE_OPERAND (inner, 1);
7241 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7242 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7243 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7245 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7248 /* If the argument isn't invariant then there's nothing else we can do. */
7249 if (!TREE_CONSTANT (inner_arg0))
7250 return NULL_TREE;
7252 /* If we expect that a comparison against the argument will fold to
7253 a constant, return the constant. In practice, this means a true
7254 constant or the address of a non-weak symbol. */
7255 inner = inner_arg0;
7256 STRIP_NOPS (inner);
7257 if (TREE_CODE (inner) == ADDR_EXPR)
7261 inner = TREE_OPERAND (inner, 0);
7263 while (TREE_CODE (inner) == COMPONENT_REF
7264 || TREE_CODE (inner) == ARRAY_REF);
7265 if ((TREE_CODE (inner) == VAR_DECL
7266 || TREE_CODE (inner) == FUNCTION_DECL)
7267 && DECL_WEAK (inner))
7268 return NULL_TREE;
7271 /* Otherwise, ARG0 already has the proper type for the return value. */
7272 return arg0;
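/* Illustrative example, added for exposition (not part of the original
   source).  The distribution step above rewrites

     __builtin_expect (a && b, 1)

   into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each arm of the short-circuit operator carries its own hint.  */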
7275 /* Fold a call to __builtin_classify_type with argument ARG. */
7277 static tree
7278 fold_builtin_classify_type (tree arg)
7280 if (arg == 0)
7281 return build_int_cst (integer_type_node, no_type_class);
7283 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7286 /* Fold a call to __builtin_strlen with argument ARG. */
7288 static tree
7289 fold_builtin_strlen (location_t loc, tree type, tree arg)
7291 if (!validate_arg (arg, POINTER_TYPE))
7292 return NULL_TREE;
7293 else
7295 tree len = c_strlen (arg, 0);
7297 if (len)
7298 return fold_convert_loc (loc, type, len);
7300 return NULL_TREE;
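/* Illustrative example, added for exposition (not part of the original
   source).  When c_strlen can compute the length, the call folds
   outright:

     strlen ("abc")  ->  (size_t) 3

   A non-constant argument yields NULL_TREE and is left alone.  */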
7304 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7306 static tree
7307 fold_builtin_inf (location_t loc, tree type, int warn)
7309 REAL_VALUE_TYPE real;
7311 /* __builtin_inff is intended to be usable to define INFINITY on all
7312 targets. If an infinity is not available, INFINITY expands "to a
7313 positive constant of type float that overflows at translation
7314 time", footnote "In this case, using INFINITY will violate the
7315 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7316 Thus we pedwarn to ensure this constraint violation is
7317 diagnosed. */
7318 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7319 pedwarn (loc, 0, "target format does not support infinity");
7321 real_inf (&real);
7322 return build_real (type, real);
7325 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7326 NULL_TREE if no simplification can be made. */
7328 static tree
7329 fold_builtin_sincos (location_t loc,
7330 tree arg0, tree arg1, tree arg2)
7332 tree type;
7333 tree fndecl, call = NULL_TREE;
7335 if (!validate_arg (arg0, REAL_TYPE)
7336 || !validate_arg (arg1, POINTER_TYPE)
7337 || !validate_arg (arg2, POINTER_TYPE))
7338 return NULL_TREE;
7340 type = TREE_TYPE (arg0);
7342 /* Canonicalizing sincos to cexpi requires a cexpi builtin for TYPE. */
7343 built_in_function fn = mathfn_built_in_2 (type, BUILT_IN_CEXPI);
7344 if (fn == END_BUILTINS)
7345 return NULL_TREE;
7347 /* Calculate the result when the argument is a constant. */
7348 if (TREE_CODE (arg0) == REAL_CST)
7350 tree complex_type = build_complex_type (type);
7351 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7352 /* Otherwise build a call to cexpi, saved so both results share it. */
7353 if (!call)
7355 if (!targetm.libc_has_function (function_c99_math_complex)
7356 || !builtin_decl_implicit_p (fn))
7357 return NULL_TREE;
7358 fndecl = builtin_decl_explicit (fn);
7359 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7360 call = builtin_save_expr (call);
7363 return build2 (COMPOUND_EXPR, void_type_node,
7364 build2 (MODIFY_EXPR, void_type_node,
7365 build_fold_indirect_ref_loc (loc, arg1),
7366 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7367 build2 (MODIFY_EXPR, void_type_node,
7368 build_fold_indirect_ref_loc (loc, arg2),
7369 fold_build1_loc (loc, REALPART_EXPR, type, call)));
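/* Illustrative example, added for exposition (not part of the original
   source).  On a libc with C99 complex support,

     double s, c;
     sincos (x, &s, &c);

   is rewritten along the lines of

     tmp = cexpi (x);
     s = __imag__ tmp;
     c = __real__ tmp;

   where TMP is a save_expr, so a single cexpi result feeds both
   stores and can later be CSEd or constant-folded.  */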
7372 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7373 arguments to the call, and TYPE is its return type.
7374 Return NULL_TREE if no simplification can be made. */
7376 static tree
7377 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7379 if (!validate_arg (arg1, POINTER_TYPE)
7380 || !validate_arg (arg2, INTEGER_TYPE)
7381 || !validate_arg (len, INTEGER_TYPE))
7382 return NULL_TREE;
7383 else
7385 const char *p1;
7387 if (TREE_CODE (arg2) != INTEGER_CST
7388 || !tree_fits_uhwi_p (len))
7389 return NULL_TREE;
7391 p1 = c_getstr (arg1);
7392 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7394 char c;
7395 const char *r;
7396 tree tem;
7398 if (target_char_cast (arg2, &c))
7399 return NULL_TREE;
7401 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7403 if (r == NULL)
7404 return build_int_cst (TREE_TYPE (arg1), 0);
7406 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7407 return fold_convert_loc (loc, type, tem);
7409 return NULL_TREE;
7413 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7414 Return NULL_TREE if no simplification can be made. */
7416 static tree
7417 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7419 if (!validate_arg (arg1, POINTER_TYPE)
7420 || !validate_arg (arg2, POINTER_TYPE)
7421 || !validate_arg (len, INTEGER_TYPE))
7422 return NULL_TREE;
7424 /* If the LEN parameter is zero, return zero. */
7425 if (integer_zerop (len))
7426 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7427 arg1, arg2);
7429 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7430 if (operand_equal_p (arg1, arg2, 0))
7431 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7433 /* If the LEN parameter is one, return an expression corresponding to
7434 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7435 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7437 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7438 tree cst_uchar_ptr_node
7439 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7441 tree ind1
7442 = fold_convert_loc (loc, integer_type_node,
7443 build1 (INDIRECT_REF, cst_uchar_node,
7444 fold_convert_loc (loc,
7445 cst_uchar_ptr_node,
7446 arg1)));
7447 tree ind2
7448 = fold_convert_loc (loc, integer_type_node,
7449 build1 (INDIRECT_REF, cst_uchar_node,
7450 fold_convert_loc (loc,
7451 cst_uchar_ptr_node,
7452 arg2)));
7453 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7456 return NULL_TREE;
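/* Illustrative example, added for exposition (not part of the original
   source).  The folds above give

     memcmp (p, q, 0)  ->  0
     memcmp (p, p, n)  ->  0
     memcmp (p, q, 1)  ->  *(const unsigned char *) p
                           - *(const unsigned char *) q

   with omit_*_operand*_loc preserving the evaluation of any dropped
   arguments that have side effects.  */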
7459 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7460 Return NULL_TREE if no simplification can be made. */
7462 static tree
7463 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7465 if (!validate_arg (arg1, POINTER_TYPE)
7466 || !validate_arg (arg2, POINTER_TYPE))
7467 return NULL_TREE;
7469 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7470 if (operand_equal_p (arg1, arg2, 0))
7471 return integer_zero_node;
7473 /* If the second arg is "", return *(const unsigned char*)arg1. */
7474 const char *p2 = c_getstr (arg2);
7475 if (p2 && *p2 == '\0')
7477 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7478 tree cst_uchar_ptr_node
7479 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7481 return fold_convert_loc (loc, integer_type_node,
7482 build1 (INDIRECT_REF, cst_uchar_node,
7483 fold_convert_loc (loc,
7484 cst_uchar_ptr_node,
7485 arg1)));
7488 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7489 const char *p1 = c_getstr (arg1);
7490 if (p1 && *p1 == '\0')
7492 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7493 tree cst_uchar_ptr_node
7494 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7496 tree temp
7497 = fold_convert_loc (loc, integer_type_node,
7498 build1 (INDIRECT_REF, cst_uchar_node,
7499 fold_convert_loc (loc,
7500 cst_uchar_ptr_node,
7501 arg2)));
7502 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7505 return NULL_TREE;
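/* Illustrative example, added for exposition (not part of the original
   source).  The folds above give

     strcmp (s, s)   ->  0
     strcmp (s, "")  ->  *(const unsigned char *) s
     strcmp ("", s)  ->  -*(const unsigned char *) s

   using unsigned char loads to match the C library's comparison
   semantics.  */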
7508 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7509 Return NULL_TREE if no simplification can be made. */
7511 static tree
7512 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7514 if (!validate_arg (arg1, POINTER_TYPE)
7515 || !validate_arg (arg2, POINTER_TYPE)
7516 || !validate_arg (len, INTEGER_TYPE))
7517 return NULL_TREE;
7519 /* If the LEN parameter is zero, return zero. */
7520 if (integer_zerop (len))
7521 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7522 arg1, arg2);
7524 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7525 if (operand_equal_p (arg1, arg2, 0))
7526 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7528 /* If the second arg is "", and the length is greater than zero,
7529 return *(const unsigned char*)arg1. */
7530 const char *p2 = c_getstr (arg2);
7531 if (p2 && *p2 == '\0'
7532 && TREE_CODE (len) == INTEGER_CST
7533 && tree_int_cst_sgn (len) == 1)
7535 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7536 tree cst_uchar_ptr_node
7537 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7539 return fold_convert_loc (loc, integer_type_node,
7540 build1 (INDIRECT_REF, cst_uchar_node,
7541 fold_convert_loc (loc,
7542 cst_uchar_ptr_node,
7543 arg1)));
7546 /* If the first arg is "", and the length is greater than zero,
7547 return -*(const unsigned char*)arg2. */
7548 const char *p1 = c_getstr (arg1);
7549 if (p1 && *p1 == '\0'
7550 && TREE_CODE (len) == INTEGER_CST
7551 && tree_int_cst_sgn (len) == 1)
7553 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7554 tree cst_uchar_ptr_node
7555 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7557 tree temp = fold_convert_loc (loc, integer_type_node,
7558 build1 (INDIRECT_REF, cst_uchar_node,
7559 fold_convert_loc (loc,
7560 cst_uchar_ptr_node,
7561 arg2)));
7562 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7565 /* If the LEN parameter is one, return an expression corresponding to
7566 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7567 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7569 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7570 tree cst_uchar_ptr_node
7571 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7573 tree ind1 = fold_convert_loc (loc, integer_type_node,
7574 build1 (INDIRECT_REF, cst_uchar_node,
7575 fold_convert_loc (loc,
7576 cst_uchar_ptr_node,
7577 arg1)));
7578 tree ind2 = fold_convert_loc (loc, integer_type_node,
7579 build1 (INDIRECT_REF, cst_uchar_node,
7580 fold_convert_loc (loc,
7581 cst_uchar_ptr_node,
7582 arg2)));
7583 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7586 return NULL_TREE;
7589 /* Fold a call to builtin isascii with argument ARG. */
7591 static tree
7592 fold_builtin_isascii (location_t loc, tree arg)
7594 if (!validate_arg (arg, INTEGER_TYPE))
7595 return NULL_TREE;
7596 else
7598 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7599 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7600 build_int_cst (integer_type_node,
7601 ~ (unsigned HOST_WIDE_INT) 0x7f));
7602 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7603 arg, integer_zero_node);
7607 /* Fold a call to builtin toascii with argument ARG. */
7609 static tree
7610 fold_builtin_toascii (location_t loc, tree arg)
7612 if (!validate_arg (arg, INTEGER_TYPE))
7613 return NULL_TREE;
7615 /* Transform toascii(c) -> (c & 0x7f). */
7616 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7617 build_int_cst (integer_type_node, 0x7f));
7620 /* Fold a call to builtin isdigit with argument ARG. */
7622 static tree
7623 fold_builtin_isdigit (location_t loc, tree arg)
7625 if (!validate_arg (arg, INTEGER_TYPE))
7626 return NULL_TREE;
7627 else
7629 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7630 /* According to the C standard, isdigit is unaffected by locale.
7631 However, it definitely is affected by the target character set. */
7632 unsigned HOST_WIDE_INT target_digit0
7633 = lang_hooks.to_target_charset ('0');
7635 if (target_digit0 == 0)
7636 return NULL_TREE;
7638 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7639 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7640 build_int_cst (unsigned_type_node, target_digit0));
7641 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7642 build_int_cst (unsigned_type_node, 9));
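/* Illustrative example, added for exposition (not part of the original
   source).  For an ASCII target the fold above turns

     isdigit (c)

   into the branch-free test

     (unsigned) c - '0' <= 9

   relying on unsigned wraparound to reject values below '0'.  */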
7646 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7648 static tree
7649 fold_builtin_fabs (location_t loc, tree arg, tree type)
7651 if (!validate_arg (arg, REAL_TYPE))
7652 return NULL_TREE;
7654 arg = fold_convert_loc (loc, type, arg);
7655 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7658 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7660 static tree
7661 fold_builtin_abs (location_t loc, tree arg, tree type)
7663 if (!validate_arg (arg, INTEGER_TYPE))
7664 return NULL_TREE;
7666 arg = fold_convert_loc (loc, type, arg);
7667 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7670 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7672 static tree
7673 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7675 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7676 if (validate_arg (arg0, REAL_TYPE)
7677 && validate_arg (arg1, REAL_TYPE)
7678 && validate_arg (arg2, REAL_TYPE)
7679 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7680 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7682 return NULL_TREE;
7685 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7687 static tree
7688 fold_builtin_carg (location_t loc, tree arg, tree type)
7690 if (validate_arg (arg, COMPLEX_TYPE)
7691 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7693 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7695 if (atan2_fn)
7697 tree new_arg = builtin_save_expr (arg);
7698 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7699 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7700 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7704 return NULL_TREE;
7707 /* Fold a call to builtin frexp. We can assume the base is 2. */
7709 static tree
7710 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7712 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7713 return NULL_TREE;
7715 STRIP_NOPS (arg0);
7717 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7718 return NULL_TREE;
7720 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7722 /* Proceed if a valid pointer type was passed in. */
7723 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7725 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7726 tree frac, exp;
7728 switch (value->cl)
7730 case rvc_zero:
7731 /* For +-0, return (*exp = 0, +-0). */
7732 exp = integer_zero_node;
7733 frac = arg0;
7734 break;
7735 case rvc_nan:
7736 case rvc_inf:
7737 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7738 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7739 case rvc_normal:
7741 /* Since the frexp function always expects base 2, and in
7742 GCC normalized significands are already in the range
7743 [0.5, 1.0), we have exactly what frexp wants. */
7744 REAL_VALUE_TYPE frac_rvt = *value;
7745 SET_REAL_EXP (&frac_rvt, 0);
7746 frac = build_real (rettype, frac_rvt);
7747 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7749 break;
7750 default:
7751 gcc_unreachable ();
7754 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
7755 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7756 TREE_SIDE_EFFECTS (arg1) = 1;
7757 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7760 return NULL_TREE;
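/* Illustrative example, added for exposition (not part of the original
   source).  For a constant argument,

     int e;
     double f = frexp (8.0, &e);

   the fold above produces the equivalent of

     (e = 4, 0.5)

   since 8.0 == 0.5 * 2^4 with the significand normalized to
   [0.5, 1.0).  */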
7763 /* Fold a call to builtin modf. */
7765 static tree
7766 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7768 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7769 return NULL_TREE;
7771 STRIP_NOPS (arg0);
7773 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7774 return NULL_TREE;
7776 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7778 /* Proceed if a valid pointer type was passed in. */
7779 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7781 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7782 REAL_VALUE_TYPE trunc, frac;
7784 switch (value->cl)
7786 case rvc_nan:
7787 case rvc_zero:
7788 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7789 trunc = frac = *value;
7790 break;
7791 case rvc_inf:
7792 /* For +-Inf, return (*arg1 = arg0, +-0). */
7793 frac = dconst0;
7794 frac.sign = value->sign;
7795 trunc = *value;
7796 break;
7797 case rvc_normal:
7798 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7799 real_trunc (&trunc, VOIDmode, value);
7800 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7801 /* If the original number was negative and already
7802 integral, then the fractional part is -0.0. */
7803 if (value->sign && frac.cl == rvc_zero)
7804 frac.sign = value->sign;
7805 break;
7808 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7809 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7810 build_real (rettype, trunc));
7811 TREE_SIDE_EFFECTS (arg1) = 1;
7812 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7813 build_real (rettype, frac));
7816 return NULL_TREE;
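/* Illustrative example, added for exposition (not part of the original
   source).  For a constant argument,

     double ip;
     double f = modf (2.75, &ip);

   folds to the equivalent of

     (ip = 2.0, 0.75)

   and a negative integral input such as -3.0 yields a fractional
   part of -0.0, per the sign handling in the rvc_normal case.  */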
7819 /* Given a location LOC, an interclass builtin function decl FNDECL
7820 and its single argument ARG, return a folded expression computing
7821 the same, or NULL_TREE if we either couldn't or didn't want to fold
7822 (the latter happens if there's an RTL instruction available). */
7824 static tree
7825 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7827 machine_mode mode;
7829 if (!validate_arg (arg, REAL_TYPE))
7830 return NULL_TREE;
7832 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7833 return NULL_TREE;
7835 mode = TYPE_MODE (TREE_TYPE (arg));
7837 /* If there is no optab, try generic code. */
7838 switch (DECL_FUNCTION_CODE (fndecl))
7840 tree result;
7842 CASE_FLT_FN (BUILT_IN_ISINF):
7844 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7845 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7846 tree const type = TREE_TYPE (arg);
7847 REAL_VALUE_TYPE r;
7848 char buf[128];
7850 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7851 real_from_string (&r, buf);
7852 result = build_call_expr (isgr_fn, 2,
7853 fold_build1_loc (loc, ABS_EXPR, type, arg),
7854 build_real (type, r));
7855 return result;
7857 CASE_FLT_FN (BUILT_IN_FINITE):
7858 case BUILT_IN_ISFINITE:
7860 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7861 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7862 tree const type = TREE_TYPE (arg);
7863 REAL_VALUE_TYPE r;
7864 char buf[128];
7866 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7867 real_from_string (&r, buf);
7868 result = build_call_expr (isle_fn, 2,
7869 fold_build1_loc (loc, ABS_EXPR, type, arg),
7870 build_real (type, r));
7871 /*result = fold_build2_loc (loc, UNGT_EXPR,
7872 TREE_TYPE (TREE_TYPE (fndecl)),
7873 fold_build1_loc (loc, ABS_EXPR, type, arg),
7874 build_real (type, r));
7875 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7876 TREE_TYPE (TREE_TYPE (fndecl)),
7877 result);*/
7878 return result;
7880 case BUILT_IN_ISNORMAL:
7882 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7883 islessequal(fabs(x),DBL_MAX). */
7884 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7885 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7886 tree const type = TREE_TYPE (arg);
7887 REAL_VALUE_TYPE rmax, rmin;
7888 char buf[128];
7890 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7891 real_from_string (&rmax, buf);
7892 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7893 real_from_string (&rmin, buf);
7894 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7895 result = build_call_expr (isle_fn, 2, arg,
7896 build_real (type, rmax));
7897 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
7898 build_call_expr (isge_fn, 2, arg,
7899 build_real (type, rmin)));
7900 return result;
7902 default:
7903 break;
7906 return NULL_TREE;
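/* Illustrative example, added for exposition (not part of the original
   source).  When no instruction pattern is available, the generic
   expansions above amount to

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  islessequal (fabs (x), DBL_MAX)
                       & isgreaterequal (fabs (x), DBL_MIN)

   with DBL_MAX/DBL_MIN standing for the extreme finite values of the
   argument's actual mode, obtained via get_max_float and emin.  */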
7909 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7910 ARG is the argument for the call. */
7912 static tree
7913 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7917 if (!validate_arg (arg, REAL_TYPE))
7918 return NULL_TREE;
7920 switch (builtin_index)
7922 case BUILT_IN_ISINF:
7923 if (!HONOR_INFINITIES (arg))
7924 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7926 return NULL_TREE;
7928 case BUILT_IN_ISINF_SIGN:
7930 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7931 /* In a boolean context, GCC will fold the inner COND_EXPR to
7932 1. So e.g. "if (isinf_sign(x))" would be folded to just
7933 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7934 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
7935 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7936 tree tmp = NULL_TREE;
7938 arg = builtin_save_expr (arg);
7940 if (signbit_fn && isinf_fn)
7942 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7943 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7945 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7946 signbit_call, integer_zero_node);
7947 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7948 isinf_call, integer_zero_node);
7950 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7951 integer_minus_one_node, integer_one_node);
7952 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7953 isinf_call, tmp,
7954 integer_zero_node);
7957 return tmp;
7960 case BUILT_IN_ISFINITE:
7961 if (!HONOR_NANS (arg)
7962 && !HONOR_INFINITIES (arg))
7963 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7965 return NULL_TREE;
7967 case BUILT_IN_ISNAN:
7968 if (!HONOR_NANS (arg))
7969 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7971 arg = builtin_save_expr (arg);
7972 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7974 default:
7975 gcc_unreachable ();
7979 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7980 This builtin will generate code to return the appropriate floating
7981 point classification depending on the value of the floating point
7982 number passed in. The possible return values must be supplied as
7983 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7984 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7985 one floating point argument which is "type generic". */
7987 static tree
7988 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7990 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7991 arg, type, res, tmp;
7992 machine_mode mode;
7993 REAL_VALUE_TYPE r;
7994 char buf[128];
7996 /* Verify the required arguments in the original call. */
7997 if (nargs != 6
7998 || !validate_arg (args[0], INTEGER_TYPE)
7999 || !validate_arg (args[1], INTEGER_TYPE)
8000 || !validate_arg (args[2], INTEGER_TYPE)
8001 || !validate_arg (args[3], INTEGER_TYPE)
8002 || !validate_arg (args[4], INTEGER_TYPE)
8003 || !validate_arg (args[5], REAL_TYPE))
8004 return NULL_TREE;
8006 fp_nan = args[0];
8007 fp_infinite = args[1];
8008 fp_normal = args[2];
8009 fp_subnormal = args[3];
8010 fp_zero = args[4];
8011 arg = args[5];
8012 type = TREE_TYPE (arg);
8013 mode = TYPE_MODE (type);
8014 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8016 /* fpclassify(x) ->
8017 isnan(x) ? FP_NAN :
8018 (fabs(x) == Inf ? FP_INFINITE :
8019 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8020 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8022 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8023 build_real (type, dconst0));
8024 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8025 tmp, fp_zero, fp_subnormal);
8027 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8028 real_from_string (&r, buf);
8029 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8030 arg, build_real (type, r));
8031 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8033 if (HONOR_INFINITIES (mode))
8035 real_inf (&r);
8036 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8037 build_real (type, r));
8038 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8039 fp_infinite, res);
8042 if (HONOR_NANS (mode))
8044 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8045 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8048 return res;
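/* Illustrative example, added for exposition (not part of the original
   source).  For IEEE double the chain built above classifies

     0.0        ->  FP_ZERO
     1.0e-310   ->  FP_SUBNORMAL  (below 0x1p-1022, i.e. DBL_MIN)
     1.0        ->  FP_NORMAL
     1.0 / 0.0  ->  FP_INFINITE
     0.0 / 0.0  ->  FP_NAN

   where 0x1p-1022 is the "0x1p%d" constant derived from emin above.  */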
8051 /* Fold a call to an unordered comparison function such as
8052 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8053 being called and ARG0 and ARG1 are the arguments for the call.
8054 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8055 the opposite of the desired result. UNORDERED_CODE is used
8056 for modes that can hold NaNs and ORDERED_CODE is used for
8057 the rest. */
8059 static tree
8060 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8061 enum tree_code unordered_code,
8062 enum tree_code ordered_code)
8064 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8065 enum tree_code code;
8066 tree type0, type1;
8067 enum tree_code code0, code1;
8068 tree cmp_type = NULL_TREE;
8070 type0 = TREE_TYPE (arg0);
8071 type1 = TREE_TYPE (arg1);
8073 code0 = TREE_CODE (type0);
8074 code1 = TREE_CODE (type1);
8076 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8077 /* Choose the wider of two real types. */
8078 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8079 ? type0 : type1;
8080 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8081 cmp_type = type0;
8082 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8083 cmp_type = type1;
8085 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8086 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8088 if (unordered_code == UNORDERED_EXPR)
8090 if (!HONOR_NANS (arg0))
8091 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8092 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8095 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8096 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8097 fold_build2_loc (loc, code, type, arg0, arg1));
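/* Illustrative example, added for exposition (not part of the original
   source).  Each unordered builtin folds to the negation of its
   inverse comparison:

     isgreater (x, y)    ->  !UNLE_EXPR <x, y>
     isunordered (x, y)  ->  UNORDERED_EXPR <x, y>

   and when the comparison mode cannot hold NaNs the UN* codes give
   way to the ordered ones, e.g. isgreater (x, y) -> !(x <= y).  */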
8100 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8101 arithmetic if it can never overflow, or into internal functions that
8102 return both the result of the arithmetic and an overflow flag in
8103 a complex integer result, or some other check for overflow. */
8105 static tree
8106 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8107 tree arg0, tree arg1, tree arg2)
8109 enum internal_fn ifn = IFN_LAST;
8110 tree type = TREE_TYPE (TREE_TYPE (arg2));
8111 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8112 switch (fcode)
8114 case BUILT_IN_ADD_OVERFLOW:
8115 case BUILT_IN_SADD_OVERFLOW:
8116 case BUILT_IN_SADDL_OVERFLOW:
8117 case BUILT_IN_SADDLL_OVERFLOW:
8118 case BUILT_IN_UADD_OVERFLOW:
8119 case BUILT_IN_UADDL_OVERFLOW:
8120 case BUILT_IN_UADDLL_OVERFLOW:
8121 ifn = IFN_ADD_OVERFLOW;
8122 break;
8123 case BUILT_IN_SUB_OVERFLOW:
8124 case BUILT_IN_SSUB_OVERFLOW:
8125 case BUILT_IN_SSUBL_OVERFLOW:
8126 case BUILT_IN_SSUBLL_OVERFLOW:
8127 case BUILT_IN_USUB_OVERFLOW:
8128 case BUILT_IN_USUBL_OVERFLOW:
8129 case BUILT_IN_USUBLL_OVERFLOW:
8130 ifn = IFN_SUB_OVERFLOW;
8131 break;
8132 case BUILT_IN_MUL_OVERFLOW:
8133 case BUILT_IN_SMUL_OVERFLOW:
8134 case BUILT_IN_SMULL_OVERFLOW:
8135 case BUILT_IN_SMULLL_OVERFLOW:
8136 case BUILT_IN_UMUL_OVERFLOW:
8137 case BUILT_IN_UMULL_OVERFLOW:
8138 case BUILT_IN_UMULLL_OVERFLOW:
8139 ifn = IFN_MUL_OVERFLOW;
8140 break;
8141 default:
8142 gcc_unreachable ();
8144 tree ctype = build_complex_type (type);
8145 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8146 2, arg0, arg1);
8147 tree tgt = save_expr (call);
8148 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8149 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8150 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8151 tree store
8152 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8153 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
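/* Illustrative example, added for exposition (not part of the original
   source).  A call such as

     bool ovf = __builtin_add_overflow (a, b, &res);

   is folded to the equivalent of

     tmp = .ADD_OVERFLOW (a, b);            <complex integer result>
     res = REALPART_EXPR <tmp>;
     ovf = (bool) IMAGPART_EXPR <tmp>;

   so one internal function computes the value and the overflow flag
   together.  */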
8156 /* Fold a call to built-in function FNDECL with 0 arguments.
8157 This function returns NULL_TREE if no simplification was possible. */
8159 static tree
8160 fold_builtin_0 (location_t loc, tree fndecl)
8162 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8163 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8164 switch (fcode)
8166 CASE_FLT_FN (BUILT_IN_INF):
8167 case BUILT_IN_INFD32:
8168 case BUILT_IN_INFD64:
8169 case BUILT_IN_INFD128:
8170 return fold_builtin_inf (loc, type, true);
8172 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8173 return fold_builtin_inf (loc, type, false);
8175 case BUILT_IN_CLASSIFY_TYPE:
8176 return fold_builtin_classify_type (NULL_TREE);
8178 default:
8179 break;
8181 return NULL_TREE;
8184 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8185 This function returns NULL_TREE if no simplification was possible. */
8187 static tree
8188 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8193 if (TREE_CODE (arg0) == ERROR_MARK)
8194 return NULL_TREE;
8196 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8197 return ret;
8199 switch (fcode)
8201 case BUILT_IN_CONSTANT_P:
8203 tree val = fold_builtin_constant_p (arg0);
8205 /* Gimplification will pull the CALL_EXPR for the builtin out of
8206 an if condition. When not optimizing, we'll not CSE it back.
8207 To avoid link-error regressions, return false now. */
8208 if (!val && !optimize)
8209 val = integer_zero_node;
8211 return val;
8214 case BUILT_IN_CLASSIFY_TYPE:
8215 return fold_builtin_classify_type (arg0);
8217 case BUILT_IN_STRLEN:
8218 return fold_builtin_strlen (loc, type, arg0);
8220 CASE_FLT_FN (BUILT_IN_FABS):
8221 case BUILT_IN_FABSD32:
8222 case BUILT_IN_FABSD64:
8223 case BUILT_IN_FABSD128:
8224 return fold_builtin_fabs (loc, arg0, type);
8226 case BUILT_IN_ABS:
8227 case BUILT_IN_LABS:
8228 case BUILT_IN_LLABS:
8229 case BUILT_IN_IMAXABS:
8230 return fold_builtin_abs (loc, arg0, type);
8232 CASE_FLT_FN (BUILT_IN_CONJ):
8233 if (validate_arg (arg0, COMPLEX_TYPE)
8234 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8235 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8236 break;
8238 CASE_FLT_FN (BUILT_IN_CREAL):
8239 if (validate_arg (arg0, COMPLEX_TYPE)
8240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8241 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8242 break;
8244 CASE_FLT_FN (BUILT_IN_CIMAG):
8245 if (validate_arg (arg0, COMPLEX_TYPE)
8246 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8247 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8248 break;
8250 CASE_FLT_FN (BUILT_IN_CARG):
8251 return fold_builtin_carg (loc, arg0, type);
8253 case BUILT_IN_ISASCII:
8254 return fold_builtin_isascii (loc, arg0);
8256 case BUILT_IN_TOASCII:
8257 return fold_builtin_toascii (loc, arg0);
8259 case BUILT_IN_ISDIGIT:
8260 return fold_builtin_isdigit (loc, arg0);
8262 CASE_FLT_FN (BUILT_IN_FINITE):
8263 case BUILT_IN_FINITED32:
8264 case BUILT_IN_FINITED64:
8265 case BUILT_IN_FINITED128:
8266 case BUILT_IN_ISFINITE:
8268 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8269 if (ret)
8270 return ret;
8271 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8274 CASE_FLT_FN (BUILT_IN_ISINF):
8275 case BUILT_IN_ISINFD32:
8276 case BUILT_IN_ISINFD64:
8277 case BUILT_IN_ISINFD128:
8279 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8280 if (ret)
8281 return ret;
8282 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8285 case BUILT_IN_ISNORMAL:
8286 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8288 case BUILT_IN_ISINF_SIGN:
8289 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8291 CASE_FLT_FN (BUILT_IN_ISNAN):
8292 case BUILT_IN_ISNAND32:
8293 case BUILT_IN_ISNAND64:
8294 case BUILT_IN_ISNAND128:
8295 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8297 case BUILT_IN_FREE:
8298 if (integer_zerop (arg0))
8299 return build_empty_stmt (loc);
8300 break;
8302 default:
8303 break;
8306 return NULL_TREE;
8310 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8311 This function returns NULL_TREE if no simplification was possible. */
8313 static tree
8314 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8316 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8317 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8319 if (TREE_CODE (arg0) == ERROR_MARK
8320 || TREE_CODE (arg1) == ERROR_MARK)
8321 return NULL_TREE;
8323 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8324 return ret;
8326 switch (fcode)
8328 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8329 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8330 if (validate_arg (arg0, REAL_TYPE)
8331 && validate_arg (arg1, POINTER_TYPE))
8332 return do_mpfr_lgamma_r (arg0, arg1, type);
8333 break;
8335 CASE_FLT_FN (BUILT_IN_FREXP):
8336 return fold_builtin_frexp (loc, arg0, arg1, type);
8338 CASE_FLT_FN (BUILT_IN_MODF):
8339 return fold_builtin_modf (loc, arg0, arg1, type);
8341 case BUILT_IN_STRSTR:
8342 return fold_builtin_strstr (loc, arg0, arg1, type);
8344 case BUILT_IN_STRSPN:
8345 return fold_builtin_strspn (loc, arg0, arg1);
8347 case BUILT_IN_STRCSPN:
8348 return fold_builtin_strcspn (loc, arg0, arg1);
8350 case BUILT_IN_STRCHR:
8351 case BUILT_IN_INDEX:
8352 return fold_builtin_strchr (loc, arg0, arg1, type);
8354 case BUILT_IN_STRRCHR:
8355 case BUILT_IN_RINDEX:
8356 return fold_builtin_strrchr (loc, arg0, arg1, type);
8358 case BUILT_IN_STRCMP:
8359 return fold_builtin_strcmp (loc, arg0, arg1);
8361 case BUILT_IN_STRPBRK:
8362 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8364 case BUILT_IN_EXPECT:
8365 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8367 case BUILT_IN_ISGREATER:
8368 return fold_builtin_unordered_cmp (loc, fndecl,
8369 arg0, arg1, UNLE_EXPR, LE_EXPR);
8370 case BUILT_IN_ISGREATEREQUAL:
8371 return fold_builtin_unordered_cmp (loc, fndecl,
8372 arg0, arg1, UNLT_EXPR, LT_EXPR);
8373 case BUILT_IN_ISLESS:
8374 return fold_builtin_unordered_cmp (loc, fndecl,
8375 arg0, arg1, UNGE_EXPR, GE_EXPR);
8376 case BUILT_IN_ISLESSEQUAL:
8377 return fold_builtin_unordered_cmp (loc, fndecl,
8378 arg0, arg1, UNGT_EXPR, GT_EXPR);
8379 case BUILT_IN_ISLESSGREATER:
8380 return fold_builtin_unordered_cmp (loc, fndecl,
8381 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8382 case BUILT_IN_ISUNORDERED:
8383 return fold_builtin_unordered_cmp (loc, fndecl,
8384 arg0, arg1, UNORDERED_EXPR,
8385 NOP_EXPR);
8387 /* We do the folding for va_start in the expander. */
8388 case BUILT_IN_VA_START:
8389 break;
8391 case BUILT_IN_OBJECT_SIZE:
8392 return fold_builtin_object_size (arg0, arg1);
8394 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8395 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8397 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8398 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8400 default:
8401 break;
8403 return NULL_TREE;
8406 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8407 and ARG2.
8408 This function returns NULL_TREE if no simplification was possible. */
8410 static tree
8411 fold_builtin_3 (location_t loc, tree fndecl,
8412 tree arg0, tree arg1, tree arg2)
8414 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8415 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8417 if (TREE_CODE (arg0) == ERROR_MARK
8418 || TREE_CODE (arg1) == ERROR_MARK
8419 || TREE_CODE (arg2) == ERROR_MARK)
8420 return NULL_TREE;
8422 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8423 arg0, arg1, arg2))
8424 return ret;
8426 switch (fcode)
8429 CASE_FLT_FN (BUILT_IN_SINCOS):
8430 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8432 CASE_FLT_FN (BUILT_IN_FMA):
8433 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8435 CASE_FLT_FN (BUILT_IN_REMQUO):
8436 if (validate_arg (arg0, REAL_TYPE)
8437 && validate_arg (arg1, REAL_TYPE)
8438 && validate_arg (arg2, POINTER_TYPE))
8439 return do_mpfr_remquo (arg0, arg1, arg2);
8440 break;
8442 case BUILT_IN_STRNCMP:
8443 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8445 case BUILT_IN_MEMCHR:
8446 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8448 case BUILT_IN_BCMP:
8449 case BUILT_IN_MEMCMP:
8450 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8452 case BUILT_IN_EXPECT:
8453 return fold_builtin_expect (loc, arg0, arg1, arg2);
8455 case BUILT_IN_ADD_OVERFLOW:
8456 case BUILT_IN_SUB_OVERFLOW:
8457 case BUILT_IN_MUL_OVERFLOW:
8458 case BUILT_IN_SADD_OVERFLOW:
8459 case BUILT_IN_SADDL_OVERFLOW:
8460 case BUILT_IN_SADDLL_OVERFLOW:
8461 case BUILT_IN_SSUB_OVERFLOW:
8462 case BUILT_IN_SSUBL_OVERFLOW:
8463 case BUILT_IN_SSUBLL_OVERFLOW:
8464 case BUILT_IN_SMUL_OVERFLOW:
8465 case BUILT_IN_SMULL_OVERFLOW:
8466 case BUILT_IN_SMULLL_OVERFLOW:
8467 case BUILT_IN_UADD_OVERFLOW:
8468 case BUILT_IN_UADDL_OVERFLOW:
8469 case BUILT_IN_UADDLL_OVERFLOW:
8470 case BUILT_IN_USUB_OVERFLOW:
8471 case BUILT_IN_USUBL_OVERFLOW:
8472 case BUILT_IN_USUBLL_OVERFLOW:
8473 case BUILT_IN_UMUL_OVERFLOW:
8474 case BUILT_IN_UMULL_OVERFLOW:
8475 case BUILT_IN_UMULLL_OVERFLOW:
8476 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8478 default:
8479 break;
8481 return NULL_TREE;
8484 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8485 arguments. The trailing bool argument (formerly IGNORE) is
8486 currently unused. This function returns NULL_TREE if no
8487 simplification was possible. */
8489 tree
8490 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8492 tree ret = NULL_TREE;
8494 switch (nargs)
8496 case 0:
8497 ret = fold_builtin_0 (loc, fndecl);
8498 break;
8499 case 1:
8500 ret = fold_builtin_1 (loc, fndecl, args[0]);
8501 break;
8502 case 2:
8503 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8504 break;
8505 case 3:
8506 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8507 break;
8508 default:
8509 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8510 break;
8512 if (ret)
8514 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8515 SET_EXPR_LOCATION (ret, loc);
8516 TREE_NO_WARNING (ret) = 1;
8517 return ret;
8519 return NULL_TREE;
8522 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8523 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8524 of arguments in ARGS to be omitted. OLDNARGS is the number of
8525 elements in ARGS. */
8527 static tree
8528 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8529 int skip, tree fndecl, int n, va_list newargs)
8531 int nargs = oldnargs - skip + n;
8532 tree *buffer;
8534 if (n > 0)
8536 int i, j;
8538 buffer = XALLOCAVEC (tree, nargs);
8539 for (i = 0; i < n; i++)
8540 buffer[i] = va_arg (newargs, tree);
8541 for (j = skip; j < oldnargs; j++, i++)
8542 buffer[i] = args[j];
8544 else
8545 buffer = args + skip;
8547 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8550 /* Return true if FNDECL shouldn't be folded right now.
8551 If a built-in function has an always_inline wrapper, defer
8552 folding it until after always_inline functions have
8553 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8554 might not be performed. */
8556 bool
8557 avoid_folding_inline_builtin (tree fndecl)
8559 return (DECL_DECLARED_INLINE_P (fndecl)
8560 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8561 && cfun
8562 && !cfun->always_inline_functions_inlined
8563 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8566 /* A wrapper function for builtin folding that prevents warnings for
8567 "statement without effect" and the like, caused by removing the
8568 call node earlier than the warning is generated. */
8570 tree
8571 fold_call_expr (location_t loc, tree exp, bool ignore)
8573 tree ret = NULL_TREE;
8574 tree fndecl = get_callee_fndecl (exp);
8575 if (fndecl
8576 && TREE_CODE (fndecl) == FUNCTION_DECL
8577 && DECL_BUILT_IN (fndecl)
8578 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8579 yet. Defer folding until we see all the arguments
8580 (after inlining). */
8581 && !CALL_EXPR_VA_ARG_PACK (exp))
8583 int nargs = call_expr_nargs (exp);
8585 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8586 instead last argument is __builtin_va_arg_pack (). Defer folding
8587 even in that case, until arguments are finalized. */
8588 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8590 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8591 if (fndecl2
8592 && TREE_CODE (fndecl2) == FUNCTION_DECL
8593 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8594 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8595 return NULL_TREE;
8598 if (avoid_folding_inline_builtin (fndecl))
8599 return NULL_TREE;
8601 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8602 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8603 CALL_EXPR_ARGP (exp), ignore);
8604 else
8606 tree *args = CALL_EXPR_ARGP (exp);
8607 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8608 if (ret)
8609 return ret;
8612 return NULL_TREE;
8615 /* Fold a CALL_EXPR with FN as the function expression.
8616 N arguments are passed in the array ARGARRAY. Return a folded
8617 expression or NULL_TREE if no simplification was possible. */
8619 tree
8620 fold_builtin_call_array (location_t loc, tree,
8621 tree fn,
8622 int n,
8623 tree *argarray)
8625 if (TREE_CODE (fn) != ADDR_EXPR)
8626 return NULL_TREE;
8628 tree fndecl = TREE_OPERAND (fn, 0);
8629 if (TREE_CODE (fndecl) == FUNCTION_DECL
8630 && DECL_BUILT_IN (fndecl))
8632 /* If last argument is __builtin_va_arg_pack (), arguments to this
8633 function are not finalized yet. Defer folding until they are. */
8634 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8636 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8637 if (fndecl2
8638 && TREE_CODE (fndecl2) == FUNCTION_DECL
8639 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8640 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8641 return NULL_TREE;
8643 if (avoid_folding_inline_builtin (fndecl))
8644 return NULL_TREE;
8645 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8646 return targetm.fold_builtin (fndecl, n, argarray, false);
8647 else
8648 return fold_builtin_n (loc, fndecl, argarray, n, false);
8651 return NULL_TREE;
8654 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8655 along with N new arguments specified as the "..." parameters. SKIP
8656 is the number of arguments in EXP to be omitted. This function is used
8657 to do varargs-to-varargs transformations. */
8659 static tree
8660 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8662 va_list ap;
8663 tree t;
8665 va_start (ap, n);
8666 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8667 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8668 va_end (ap);
8670 return t;
8673 /* Validate a single argument ARG against a tree code CODE representing
8674 a type. */
8676 static bool
8677 validate_arg (const_tree arg, enum tree_code code)
8679 if (!arg)
8680 return false;
8681 else if (code == POINTER_TYPE)
8682 return POINTER_TYPE_P (TREE_TYPE (arg));
8683 else if (code == INTEGER_TYPE)
8684 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8685 return code == TREE_CODE (TREE_TYPE (arg));
8688 /* This function validates the types of a function call argument list
8689 against a specified list of tree_codes. If the last specifier is a 0,
8690 that represents an ellipsis, otherwise the last specifier must be a
8691 VOID_TYPE.
8693 This is the GIMPLE version of validate_arglist. Eventually we want to
8694 completely convert builtins.c to work from GIMPLEs and the tree based
8695 validate_arglist will then be removed. */
8697 bool
8698 validate_gimple_arglist (const gcall *call, ...)
8700 enum tree_code code;
8701 bool res = false;
8702 va_list ap;
8703 const_tree arg;
8704 size_t i;
8706 va_start (ap, call);
8707 i = 0;
8711 code = (enum tree_code) va_arg (ap, int);
8712 switch (code)
8714 case 0:
8715 /* This signifies an ellipsis; any further arguments are all OK. */
8716 res = true;
8717 goto end;
8718 case VOID_TYPE:
8719 /* This signifies an endlink; if no arguments remain, return
8720 true, otherwise return false. */
8721 res = (i == gimple_call_num_args (call));
8722 goto end;
8723 default:
8724 /* If no parameters remain or the parameter's code does not
8725 match the specified code, return false. Otherwise continue
8726 checking any remaining arguments. */
8727 arg = gimple_call_arg (call, i++);
8728 if (!validate_arg (arg, code))
8729 goto end;
8730 break;
8733 while (1);
8735 /* We need gotos here so that the single va_end call is reached
8736 from every exit path. */
8737 end: ;
8738 va_end (ap);
8740 return res;
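/* Illustrative example, added for exposition (not part of the original
   source).  A memcpy-style checker would be written as

     validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
                              INTEGER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE demands exactly three arguments; a
   trailing 0 would instead accept any further arguments.  */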
8743 /* Default target-specific builtin expander that does nothing. */
8746 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8747 rtx target ATTRIBUTE_UNUSED,
8748 rtx subtarget ATTRIBUTE_UNUSED,
8749 machine_mode mode ATTRIBUTE_UNUSED,
8750 int ignore ATTRIBUTE_UNUSED)
8752 return NULL_RTX;
8755 /* Returns true if EXP represents data that would potentially reside
8756 in a readonly section. */
8758 bool
8759 readonly_data_expr (tree exp)
8761 STRIP_NOPS (exp);
8763 if (TREE_CODE (exp) != ADDR_EXPR)
8764 return false;
8766 exp = get_base_address (TREE_OPERAND (exp, 0));
8767 if (!exp)
8768 return false;
8770 /* Make sure we call decl_readonly_section only for trees it
8771 can handle (since it returns true for everything it doesn't
8772 understand). */
8773 if (TREE_CODE (exp) == STRING_CST
8774 || TREE_CODE (exp) == CONSTRUCTOR
8775 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8776 return decl_readonly_section (exp, 0);
8777 else
8778 return false;
8781 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8782 to the call, and TYPE is its return type.
8784 Return NULL_TREE if no simplification was possible, otherwise return the
8785 simplified form of the call as a tree.
8787 The simplified form may be a constant or other expression which
8788 computes the same value, but in a more efficient manner (including
8789 calls to other builtin functions).
8791 The call may contain arguments which need to be evaluated, but
8792 which are not useful to determine the result of the call. In
8793 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8794 COMPOUND_EXPR will be an argument which must be evaluated.
8795 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8796 COMPOUND_EXPR in the chain will contain the tree for the simplified
8797 form of the builtin function call. */
8799 static tree
8800 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8802 if (!validate_arg (s1, POINTER_TYPE)
8803 || !validate_arg (s2, POINTER_TYPE))
8804 return NULL_TREE;
8805 else
8807 tree fn;
8808 const char *p1, *p2;
8810 p2 = c_getstr (s2);
8811 if (p2 == NULL)
8812 return NULL_TREE;
8814 p1 = c_getstr (s1);
8815 if (p1 != NULL)
8817 const char *r = strstr (p1, p2);
8818 tree tem;
8820 if (r == NULL)
8821 return build_int_cst (TREE_TYPE (s1), 0);
8823 /* Return an offset into the constant string argument. */
8824 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8825 return fold_convert_loc (loc, type, tem);
8828 /* The argument is const char *, and the result is char *, so we need
8829 a type conversion here to avoid a warning. */
8830 if (p2[0] == '\0')
8831 return fold_convert_loc (loc, type, s1);
8833 if (p2[1] != '\0')
8834 return NULL_TREE;
8836 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8837 if (!fn)
8838 return NULL_TREE;
8840 /* New argument list transforming strstr(s1, s2) to
8841 strchr(s1, s2[0]). */
8842 return build_call_expr_loc (loc, fn, 2, s1,
8843 build_int_cst (integer_type_node, p2[0]));
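/* Illustrative example, added for exposition (not part of the original
   source).  The folds above give

     strstr ("hello", "ll")  ->  "hello" + 2
     strstr (s, "")          ->  (char *) s
     strstr (s, "x")         ->  strchr (s, 'x')

   deferring the single-character case to the cheaper strchr.  */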
8847 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8848 the call, and TYPE is its return type.
8850 Return NULL_TREE if no simplification was possible, otherwise return the
8851 simplified form of the call as a tree.
8853 The simplified form may be a constant or other expression which
8854 computes the same value, but in a more efficient manner (including
8855 calls to other builtin functions).
8857 The call may contain arguments which need to be evaluated, but
8858 which are not useful to determine the result of the call. In
8859 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8860 COMPOUND_EXPR will be an argument which must be evaluated.
8861 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8862 COMPOUND_EXPR in the chain will contain the tree for the simplified
8863 form of the builtin function call. */
8865 static tree
8866 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8868 if (!validate_arg (s1, POINTER_TYPE)
8869 || !validate_arg (s2, INTEGER_TYPE))
8870 return NULL_TREE;
8871 else
8873 const char *p1;
8875 if (TREE_CODE (s2) != INTEGER_CST)
8876 return NULL_TREE;
8878 p1 = c_getstr (s1);
8879 if (p1 != NULL)
8881 char c;
8882 const char *r;
8883 tree tem;
8885 if (target_char_cast (s2, &c))
8886 return NULL_TREE;
8888 r = strchr (p1, c);
8890 if (r == NULL)
8891 return build_int_cst (TREE_TYPE (s1), 0);
8893 /* Return an offset into the constant string argument. */
8894 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8895 return fold_convert_loc (loc, type, tem);
8897 return NULL_TREE;
8901 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
8902 the call, and TYPE is its return type.
8904 Return NULL_TREE if no simplification was possible, otherwise return the
8905 simplified form of the call as a tree.
8907 The simplified form may be a constant or other expression which
8908 computes the same value, but in a more efficient manner (including
8909 calls to other builtin functions).
8911 The call may contain arguments which need to be evaluated, but
8912 which are not useful to determine the result of the call. In
8913 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8914 COMPOUND_EXPR will be an argument which must be evaluated.
8915 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8916 COMPOUND_EXPR in the chain will contain the tree for the simplified
8917 form of the builtin function call. */
8919 static tree
8920 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8922 if (!validate_arg (s1, POINTER_TYPE)
8923 || !validate_arg (s2, INTEGER_TYPE))
8924 return NULL_TREE;
8925 else
8927 tree fn;
8928 const char *p1;
8930 if (TREE_CODE (s2) != INTEGER_CST)
8931 return NULL_TREE;
8933 p1 = c_getstr (s1);
8934 if (p1 != NULL)
8936 char c;
8937 const char *r;
8938 tree tem;
8940 if (target_char_cast (s2, &c))
8941 return NULL_TREE;
8943 r = strrchr (p1, c);
8945 if (r == NULL)
8946 return build_int_cst (TREE_TYPE (s1), 0);
8948 /* Return an offset into the constant string argument. */
8949 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8950 return fold_convert_loc (loc, type, tem);
8953 if (! integer_zerop (s2))
8954 return NULL_TREE;
8956 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8957 if (!fn)
8958 return NULL_TREE;
8960 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
8961 return build_call_expr_loc (loc, fn, 2, s1, s2);
8965 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8966 to the call, and TYPE is its return type.
8968 Return NULL_TREE if no simplification was possible, otherwise return the
8969 simplified form of the call as a tree.
8971 The simplified form may be a constant or other expression which
8972 computes the same value, but in a more efficient manner (including
8973 calls to other builtin functions).
8975 The call may contain arguments which need to be evaluated, but
8976 which are not useful to determine the result of the call. In
8977 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8978 COMPOUND_EXPR will be an argument which must be evaluated.
8979 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8980 COMPOUND_EXPR in the chain will contain the tree for the simplified
8981 form of the builtin function call. */
8983 static tree
8984 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8986 if (!validate_arg (s1, POINTER_TYPE)
8987 || !validate_arg (s2, POINTER_TYPE))
8988 return NULL_TREE;
8989 else
8991 tree fn;
8992 const char *p1, *p2;
8994 p2 = c_getstr (s2);
8995 if (p2 == NULL)
8996 return NULL_TREE;
8998 p1 = c_getstr (s1);
8999 if (p1 != NULL)
9001 const char *r = strpbrk (p1, p2);
9002 tree tem;
9004 if (r == NULL)
9005 return build_int_cst (TREE_TYPE (s1), 0);
9007 /* Return an offset into the constant string argument. */
9008 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9009 return fold_convert_loc (loc, type, tem);
9012 if (p2[0] == '\0')
9013 /* strpbrk(x, "") == NULL.
9014 Evaluate and ignore s1 in case it had side-effects. */
9015 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9017 if (p2[1] != '\0')
9018 return NULL_TREE; /* Really call strpbrk. */
9020 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9021 if (!fn)
9022 return NULL_TREE;
9024 /* New argument list transforming strpbrk(s1, s2) to
9025 strchr(s1, s2[0]). */
9026 return build_call_expr_loc (loc, fn, 2, s1,
9027 build_int_cst (integer_type_node, p2[0]));
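/* For example, sketches of the three cases handled above:

     strpbrk ("hello", "lo")   folds to   &"hello"[2]
     strpbrk (s1, "")          folds to   a null pointer (S1 still evaluated)
     strpbrk (s1, "c")         folds to   strchr (s1, 'c')

   Anything else keeps the strpbrk call.  */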
9031 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9032 to the call.
9034 Return NULL_TREE if no simplification was possible, otherwise return the
9035 simplified form of the call as a tree.
9037 The simplified form may be a constant or other expression which
9038 computes the same value, but in a more efficient manner (including
9039 calls to other builtin functions).
9041 The call may contain arguments which need to be evaluated, but
9042 which are not useful to determine the result of the call. In
9043 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9044 COMPOUND_EXPR will be an argument which must be evaluated.
9045 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9046 COMPOUND_EXPR in the chain will contain the tree for the simplified
9047 form of the builtin function call. */
9049 static tree
9050 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9052 if (!validate_arg (s1, POINTER_TYPE)
9053 || !validate_arg (s2, POINTER_TYPE))
9054 return NULL_TREE;
9055 else
9057 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9059 /* If either argument is "", the result is zero. */
9060 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9061 /* Evaluate and ignore both arguments in case either one has
9062 side-effects. */
9063 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9064 s1, s2);
9065 return NULL_TREE;
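/* For example, the only simplification attempted above:

     strspn (s1, "")  and  strspn ("", s2)   both fold to   (size_t) 0

   with the other argument still evaluated for its side-effects.  */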
9069 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9070 to the call.
9072 Return NULL_TREE if no simplification was possible, otherwise return the
9073 simplified form of the call as a tree.
9075 The simplified form may be a constant or other expression which
9076 computes the same value, but in a more efficient manner (including
9077 calls to other builtin functions).
9079 The call may contain arguments which need to be evaluated, but
9080 which are not useful to determine the result of the call. In
9081 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9082 COMPOUND_EXPR will be an argument which must be evaluated.
9083 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9084 COMPOUND_EXPR in the chain will contain the tree for the simplified
9085 form of the builtin function call. */
9087 static tree
9088 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9090 if (!validate_arg (s1, POINTER_TYPE)
9091 || !validate_arg (s2, POINTER_TYPE))
9092 return NULL_TREE;
9093 else
9095 /* If the first argument is "", the result is zero. */
9096 const char *p1 = c_getstr (s1);
9097 if (p1 && *p1 == '\0')
9099 /* Evaluate and ignore argument s2 in case it has
9100 side-effects. */
9101 return omit_one_operand_loc (loc, size_type_node,
9102 size_zero_node, s2);
9105 /* If the second argument is "", return __builtin_strlen(s1). */
9106 const char *p2 = c_getstr (s2);
9107 if (p2 && *p2 == '\0')
9109 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9111 /* If the replacement _DECL isn't initialized, don't do the
9112 transformation. */
9113 if (!fn)
9114 return NULL_TREE;
9116 return build_call_expr_loc (loc, fn, 1, s1);
9118 return NULL_TREE;
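/* For example, sketches of the two cases handled above:

     strcspn ("", s2)   folds to   (size_t) 0   (S2 still evaluated)
     strcspn (s1, "")   folds to   strlen (s1)

   since with an empty reject set the initial segment is the whole
   string.  */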
9122 /* Fold the next_arg or va_start call EXP. Return true if an error was
9123 produced, false otherwise. This is done so that we don't output the error
9124 or warning more than once. */
9126 bool
9127 fold_builtin_next_arg (tree exp, bool va_start_p)
9129 tree fntype = TREE_TYPE (current_function_decl);
9130 int nargs = call_expr_nargs (exp);
9131 tree arg;
9132 /* There is a good chance the current input_location points inside the
9133 definition of the va_start macro (perhaps on the token for the
9134 builtin) in a system header, so warnings will not be emitted.
9135 Use the location in real source code instead. */
9136 source_location current_location =
9137 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9138 NULL);
9140 if (!stdarg_p (fntype))
9142 error ("%<va_start%> used in function with fixed args");
9143 return true;
9146 if (va_start_p)
9148 if (nargs != 2)
9150 error ("wrong number of arguments to function %<va_start%>");
9151 return true;
9153 arg = CALL_EXPR_ARG (exp, 1);
9155 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9156 once we have checked the arguments and, if needed, issued a warning. */
9157 else
9159 if (nargs == 0)
9161 /* Evidently an out of date version of <stdarg.h>; can't validate
9162 va_start's second argument, but can still work as intended. */
9163 warning_at (current_location,
9164 OPT_Wvarargs,
9165 "%<__builtin_next_arg%> called without an argument");
9166 return true;
9168 else if (nargs > 1)
9170 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9171 return true;
9173 arg = CALL_EXPR_ARG (exp, 0);
9176 if (TREE_CODE (arg) == SSA_NAME)
9177 arg = SSA_NAME_VAR (arg);
9179 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9180 or __builtin_next_arg (0) the first time we see it, after checking
9181 the arguments and if needed issuing a warning. */
9182 if (!integer_zerop (arg))
9184 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9186 /* Strip off all nops for the sake of the comparison. This
9187 is not quite the same as STRIP_NOPS. It does more.
9188 We must also strip off INDIRECT_EXPR for C++ reference
9189 parameters. */
9190 while (CONVERT_EXPR_P (arg)
9191 || TREE_CODE (arg) == INDIRECT_REF)
9192 arg = TREE_OPERAND (arg, 0);
9193 if (arg != last_parm)
9195 /* FIXME: Sometimes with the tree optimizers we can get something
9196 other than the last argument even though the user did use the
9197 last argument. We just warn and treat the arg as the last
9198 argument, so wrong code may be generated because of
9199 it. */
9200 warning_at (current_location,
9201 OPT_Wvarargs,
9202 "second parameter of %<va_start%> not last named argument");
9205 /* Undefined by C99 7.15.1.4p4 (va_start):
9206 "If the parameter parmN is declared with the register storage
9207 class, with a function or array type, or with a type that is
9208 not compatible with the type that results after application of
9209 the default argument promotions, the behavior is undefined."
9211 else if (DECL_REGISTER (arg))
9213 warning_at (current_location,
9214 OPT_Wvarargs,
9215 "undefined behaviour when second parameter of "
9216 "%<va_start%> is declared with %<register%> storage");
9219 /* We want to verify the second parameter just once before the tree
9220 optimizers are run and then avoid keeping it in the tree,
9221 as otherwise we could warn even for correct code like:
9222 void foo (int i, ...)
9223 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9224 if (va_start_p)
9225 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9226 else
9227 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9229 return false;
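/* For example, a sketch of code this checking accepts and rejects:

     void ok (int i, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }
     void bad (int i, int j, ...)
     { va_list ap; va_start (ap, i); va_end (ap); }

   The second va_start warns with -Wvarargs because I is not the last
   named parameter.  */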
9233 /* Expand a call EXP to __builtin_object_size. */
9235 static rtx
9236 expand_builtin_object_size (tree exp)
9238 tree ost;
9239 int object_size_type;
9240 tree fndecl = get_callee_fndecl (exp);
9242 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9244 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9245 exp, fndecl);
9246 expand_builtin_trap ();
9247 return const0_rtx;
9250 ost = CALL_EXPR_ARG (exp, 1);
9251 STRIP_NOPS (ost);
9253 if (TREE_CODE (ost) != INTEGER_CST
9254 || tree_int_cst_sgn (ost) < 0
9255 || compare_tree_int (ost, 3) > 0)
9257 error ("%Klast argument of %D is not integer constant between 0 and 3",
9258 exp, fndecl);
9259 expand_builtin_trap ();
9260 return const0_rtx;
9263 object_size_type = tree_to_shwi (ost);
9265 return object_size_type < 2 ? constm1_rtx : const0_rtx;
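/* For example, when the object size could not be determined by
   earlier passes, the expansion above yields the documented failure
   values:

     __builtin_object_size (p, 0)   expands to   (size_t) -1
     __builtin_object_size (p, 2)   expands to   (size_t) 0

   i.e. "unknown" is the maximum estimate for types 0 and 1 and the
   minimum estimate for types 2 and 3.  */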
9268 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9269 FCODE is the BUILT_IN_* to use.
9270 Return NULL_RTX if we failed; the caller should emit a normal call,
9271 otherwise try to get the result in TARGET, if convenient (and in
9272 mode MODE if that's convenient). */
9274 static rtx
9275 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9276 enum built_in_function fcode)
9278 tree dest, src, len, size;
9280 if (!validate_arglist (exp,
9281 POINTER_TYPE,
9282 fcode == BUILT_IN_MEMSET_CHK
9283 ? INTEGER_TYPE : POINTER_TYPE,
9284 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9285 return NULL_RTX;
9287 dest = CALL_EXPR_ARG (exp, 0);
9288 src = CALL_EXPR_ARG (exp, 1);
9289 len = CALL_EXPR_ARG (exp, 2);
9290 size = CALL_EXPR_ARG (exp, 3);
9292 if (! tree_fits_uhwi_p (size))
9293 return NULL_RTX;
9295 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9297 tree fn;
9299 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9301 warning_at (tree_nonartificial_location (exp),
9302 0, "%Kcall to %D will always overflow destination buffer",
9303 exp, get_callee_fndecl (exp));
9304 return NULL_RTX;
9307 fn = NULL_TREE;
9308 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9309 mem{cpy,pcpy,move,set} is available. */
9310 switch (fcode)
9312 case BUILT_IN_MEMCPY_CHK:
9313 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9314 break;
9315 case BUILT_IN_MEMPCPY_CHK:
9316 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9317 break;
9318 case BUILT_IN_MEMMOVE_CHK:
9319 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9320 break;
9321 case BUILT_IN_MEMSET_CHK:
9322 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9323 break;
9324 default:
9325 break;
9328 if (! fn)
9329 return NULL_RTX;
9331 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9332 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9333 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9334 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9336 else if (fcode == BUILT_IN_MEMSET_CHK)
9337 return NULL_RTX;
9338 else
9340 unsigned int dest_align = get_pointer_alignment (dest);
9342 /* If DEST is not a pointer type, call the normal function. */
9343 if (dest_align == 0)
9344 return NULL_RTX;
9346 /* If SRC and DEST are the same (and not volatile), do nothing. */
9347 if (operand_equal_p (src, dest, 0))
9349 tree expr;
9351 if (fcode != BUILT_IN_MEMPCPY_CHK)
9353 /* Evaluate and ignore LEN in case it has side-effects. */
9354 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9355 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9358 expr = fold_build_pointer_plus (dest, len);
9359 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9362 /* __memmove_chk special case. */
9363 if (fcode == BUILT_IN_MEMMOVE_CHK)
9365 unsigned int src_align = get_pointer_alignment (src);
9367 if (src_align == 0)
9368 return NULL_RTX;
9370 /* If src is categorized for a readonly section we can use
9371 normal __memcpy_chk. */
9372 if (readonly_data_expr (src))
9374 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9375 if (!fn)
9376 return NULL_RTX;
9377 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9378 dest, src, len, size);
9379 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9380 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9381 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9384 return NULL_RTX;
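/* For example, a sketch of the main transformation above: with a
   constant length that provably fits in the destination,

     __builtin___memcpy_chk (d, s, 16, 32)

   expands as a plain memcpy (d, s, 16), while a constant length
   larger than the object size triggers the "will always overflow"
   warning and leaves the checking call in place.  */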
9388 /* Emit a warning if a buffer overflow is detected at compile time. */
9390 static void
9391 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9393 int is_strlen = 0;
9394 tree len, size;
9395 location_t loc = tree_nonartificial_location (exp);
9397 switch (fcode)
9399 case BUILT_IN_STRCPY_CHK:
9400 case BUILT_IN_STPCPY_CHK:
9401 /* For __strcat_chk the warning will be emitted only if overflowing
9402 by at least strlen (dest) + 1 bytes. */
9403 case BUILT_IN_STRCAT_CHK:
9404 len = CALL_EXPR_ARG (exp, 1);
9405 size = CALL_EXPR_ARG (exp, 2);
9406 is_strlen = 1;
9407 break;
9408 case BUILT_IN_STRNCAT_CHK:
9409 case BUILT_IN_STRNCPY_CHK:
9410 case BUILT_IN_STPNCPY_CHK:
9411 len = CALL_EXPR_ARG (exp, 2);
9412 size = CALL_EXPR_ARG (exp, 3);
9413 break;
9414 case BUILT_IN_SNPRINTF_CHK:
9415 case BUILT_IN_VSNPRINTF_CHK:
9416 len = CALL_EXPR_ARG (exp, 1);
9417 size = CALL_EXPR_ARG (exp, 3);
9418 break;
9419 default:
9420 gcc_unreachable ();
9423 if (!len || !size)
9424 return;
9426 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9427 return;
9429 if (is_strlen)
9431 len = c_strlen (len, 1);
9432 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9433 return;
9435 else if (fcode == BUILT_IN_STRNCAT_CHK)
9437 tree src = CALL_EXPR_ARG (exp, 1);
9438 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9439 return;
9440 src = c_strlen (src, 1);
9441 if (! src || ! tree_fits_uhwi_p (src))
9443 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9444 exp, get_callee_fndecl (exp));
9445 return;
9447 else if (tree_int_cst_lt (src, size))
9448 return;
9450 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9451 return;
9453 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9454 exp, get_callee_fndecl (exp));
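/* For example, a sketch of source this check diagnoses, assuming the
   fortified entry points are in use (e.g. -D_FORTIFY_SOURCE):

     char buf[4];
     strcpy (buf, "too long");

   Here c_strlen of the source is 8, which together with the
   terminating NUL cannot fit in SIZE == 4, so the overflow is
   provable at compile time.  */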
9457 /* Emit a warning if a buffer overflow is detected at compile time
9458 in __sprintf_chk/__vsprintf_chk calls. */
9460 static void
9461 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9463 tree size, len, fmt;
9464 const char *fmt_str;
9465 int nargs = call_expr_nargs (exp);
9467 /* Verify the required arguments in the original call. */
9469 if (nargs < 4)
9470 return;
9471 size = CALL_EXPR_ARG (exp, 2);
9472 fmt = CALL_EXPR_ARG (exp, 3);
9474 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9475 return;
9477 /* Check whether the format is a literal string constant. */
9478 fmt_str = c_getstr (fmt);
9479 if (fmt_str == NULL)
9480 return;
9482 if (!init_target_chars ())
9483 return;
9485 /* If the format doesn't contain % args or %%, we know its size. */
9486 if (strchr (fmt_str, target_percent) == 0)
9487 len = build_int_cstu (size_type_node, strlen (fmt_str));
9488 /* If the format is "%s" and the first ... argument is a string literal,
9489 we know it too. */
9490 else if (fcode == BUILT_IN_SPRINTF_CHK
9491 && strcmp (fmt_str, target_percent_s) == 0)
9493 tree arg;
9495 if (nargs < 5)
9496 return;
9497 arg = CALL_EXPR_ARG (exp, 4);
9498 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9499 return;
9501 len = c_strlen (arg, 1);
9502 if (!len || ! tree_fits_uhwi_p (len))
9503 return;
9505 else
9506 return;
9508 if (! tree_int_cst_lt (len, size))
9509 warning_at (tree_nonartificial_location (exp),
9510 0, "%Kcall to %D will always overflow destination buffer",
9511 exp, get_callee_fndecl (exp));
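/* For example, a sketch of a call the check above diagnoses:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, 4, "%s", "hello");

   The format is exactly "%s" and its argument is a literal of length
   5, which cannot fit in SIZE == 4 together with the NUL.  */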
9514 /* Emit a warning if free is called with the address of a variable. */
9516 static void
9517 maybe_emit_free_warning (tree exp)
9519 tree arg = CALL_EXPR_ARG (exp, 0);
9521 STRIP_NOPS (arg);
9522 if (TREE_CODE (arg) != ADDR_EXPR)
9523 return;
9525 arg = get_base_address (TREE_OPERAND (arg, 0));
9526 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9527 return;
9529 if (SSA_VAR_P (arg))
9530 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9531 "%Kattempt to free a non-heap object %qD", exp, arg);
9532 else
9533 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9534 "%Kattempt to free a non-heap object", exp);
9537 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9538 if possible. */
9540 static tree
9541 fold_builtin_object_size (tree ptr, tree ost)
9543 unsigned HOST_WIDE_INT bytes;
9544 int object_size_type;
9546 if (!validate_arg (ptr, POINTER_TYPE)
9547 || !validate_arg (ost, INTEGER_TYPE))
9548 return NULL_TREE;
9550 STRIP_NOPS (ost);
9552 if (TREE_CODE (ost) != INTEGER_CST
9553 || tree_int_cst_sgn (ost) < 0
9554 || compare_tree_int (ost, 3) > 0)
9555 return NULL_TREE;
9557 object_size_type = tree_to_shwi (ost);
9559 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9560 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9561 and (size_t) 0 for types 2 and 3. */
9562 if (TREE_SIDE_EFFECTS (ptr))
9563 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9565 if (TREE_CODE (ptr) == ADDR_EXPR)
9567 bytes = compute_builtin_object_size (ptr, object_size_type);
9568 if (wi::fits_to_tree_p (bytes, size_type_node))
9569 return build_int_cstu (size_type_node, bytes);
9571 else if (TREE_CODE (ptr) == SSA_NAME)
9573 /* If object size is not known yet, delay folding until
9574 later. Maybe subsequent passes will help determine
9575 it. */
9576 bytes = compute_builtin_object_size (ptr, object_size_type);
9577 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
9578 && wi::fits_to_tree_p (bytes, size_type_node))
9579 return build_int_cstu (size_type_node, bytes);
9582 return NULL_TREE;
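/* For example, a sketch of the constant folding above:

     char buf[64];
     __builtin_object_size (&buf[16], 0)   folds to   48

   whereas an SSA_NAME pointer whose object is still unknown is left
   unfolded so that later passes can retry.  */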
9585 /* Builtins with folding operations that operate on "..." arguments
9586 need special handling; we need to store the arguments in a convenient
9587 data structure before attempting any folding. Fortunately there are
9588 only a few builtins that fall into this category. FNDECL is the
9589 function, ARGS are the arguments and NARGS the number of arguments. */
9591 static tree
9592 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9594 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9595 tree ret = NULL_TREE;
9597 switch (fcode)
9599 case BUILT_IN_FPCLASSIFY:
9600 ret = fold_builtin_fpclassify (loc, args, nargs);
9601 break;
9603 default:
9604 break;
9606 if (ret)
9608 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9609 SET_EXPR_LOCATION (ret, loc);
9610 TREE_NO_WARNING (ret) = 1;
9611 return ret;
9613 return NULL_TREE;
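/* For example, the one varargs builtin folded above:

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 1.0)

   folds to FP_NORMAL, wrapped in a NOP_EXPR with TREE_NO_WARNING set
   so that no "statement without effect" warning is issued if the
   result is unused.  */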
9616 /* Initialize format string characters in the target charset. */
9618 bool
9619 init_target_chars (void)
9621 static bool init;
9622 if (!init)
9624 target_newline = lang_hooks.to_target_charset ('\n');
9625 target_percent = lang_hooks.to_target_charset ('%');
9626 target_c = lang_hooks.to_target_charset ('c');
9627 target_s = lang_hooks.to_target_charset ('s');
9628 if (target_newline == 0 || target_percent == 0 || target_c == 0
9629 || target_s == 0)
9630 return false;
9632 target_percent_c[0] = target_percent;
9633 target_percent_c[1] = target_c;
9634 target_percent_c[2] = '\0';
9636 target_percent_s[0] = target_percent;
9637 target_percent_s[1] = target_s;
9638 target_percent_s[2] = '\0';
9640 target_percent_s_newline[0] = target_percent;
9641 target_percent_s_newline[1] = target_s;
9642 target_percent_s_newline[2] = target_newline;
9643 target_percent_s_newline[3] = '\0';
9645 init = true;
9647 return true;
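/* For example, after a successful initialization the cached strings
   are compared against user format strings, as in
   maybe_emit_sprintf_chk_warning above:

     strcmp (fmt_str, target_percent_s) == 0

   i.e. the format is exactly "%s" in the target character set, which
   may differ from the host's.  */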
9650 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9651 and no overflow/underflow occurred. INEXACT is true if M was not
9652 exactly calculated. TYPE is the tree type for the result. This
9653 function assumes that you cleared the MPFR flags before
9654 calculating M, so that any flag that is set on entry was raised
9655 by that calculation. Return NULL_TREE if any checks fail. */
9657 static tree
9658 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9660 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9661 overflow/underflow occurred. If -frounding-math, proceed iff the
9662 result of calling FUNC was exact. */
9663 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9664 && (!flag_rounding_math || !inexact))
9666 REAL_VALUE_TYPE rr;
9668 real_from_mpfr (&rr, m, type, GMP_RNDN);
9669 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9670 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9671 but the mpfr_t is not, then we underflowed in the
9672 conversion. */
9673 if (real_isfinite (&rr)
9674 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9676 REAL_VALUE_TYPE rmode;
9678 real_convert (&rmode, TYPE_MODE (type), &rr);
9679 /* Proceed iff the specified mode can hold the value. */
9680 if (real_identical (&rmode, &rr))
9681 return build_real (type, rmode);
9684 return NULL_TREE;
9687 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9688 number and no overflow/underflow occurred. INEXACT is true if M
9689 was not exactly calculated. TYPE is the tree type for the result.
9690 This function assumes that you cleared the MPFR flags before
9691 calculating M, so that any flag that is set on entry was raised
9692 by that calculation. Return NULL_TREE if any checks fail; if
9693 FORCE_CONVERT is true, bypass the checks. */
9695 static tree
9696 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9698 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9699 overflow/underflow occurred. If -frounding-math, proceed iff the
9700 result of calling FUNC was exact. */
9701 if (force_convert
9702 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9703 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9704 && (!flag_rounding_math || !inexact)))
9706 REAL_VALUE_TYPE re, im;
9708 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9709 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9710 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9711 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9712 but the mpfr_t is not, then we underflowed in the
9713 conversion. */
9714 if (force_convert
9715 || (real_isfinite (&re) && real_isfinite (&im)
9716 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9717 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9719 REAL_VALUE_TYPE re_mode, im_mode;
9721 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9722 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9723 /* Proceed iff the specified mode can hold the value. */
9724 if (force_convert
9725 || (real_identical (&re_mode, &re)
9726 && real_identical (&im_mode, &im)))
9727 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9728 build_real (TREE_TYPE (type), im_mode));
9731 return NULL_TREE;
9734 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9735 the value pointed to by ARG_QUO and return the result. The type is taken
9736 from the type of ARG0 and is used for setting the precision of the
9737 calculation and results. */
9739 static tree
9740 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9742 tree const type = TREE_TYPE (arg0);
9743 tree result = NULL_TREE;
9745 STRIP_NOPS (arg0);
9746 STRIP_NOPS (arg1);
9748 /* To proceed, MPFR must exactly represent the target floating point
9749 format, which only happens when the target base equals two. */
9750 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9751 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9752 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9754 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9755 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9757 if (real_isfinite (ra0) && real_isfinite (ra1))
9759 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9760 const int prec = fmt->p;
9761 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9762 tree result_rem;
9763 long integer_quo;
9764 mpfr_t m0, m1;
9766 mpfr_inits2 (prec, m0, m1, NULL);
9767 mpfr_from_real (m0, ra0, GMP_RNDN);
9768 mpfr_from_real (m1, ra1, GMP_RNDN);
9769 mpfr_clear_flags ();
9770 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9771 /* Remquo is independent of the rounding mode, so pass
9772 inexact=0 to do_mpfr_ckconv(). */
9773 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9774 mpfr_clears (m0, m1, NULL);
9775 if (result_rem)
9777 /* MPFR calculates quo in the host's long so it may
9778 return more bits in quo than the target int can hold
9779 if sizeof(host long) > sizeof(target int). This can
9780 happen even for native compilers in LP64 mode. In
9781 these cases, reduce the quo value modulo the largest
9782 number that the target int can hold while leaving one
9783 bit for the sign. */
9784 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9785 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9787 /* Dereference the quo pointer argument. */
9788 arg_quo = build_fold_indirect_ref (arg_quo);
9789 /* Proceed iff a valid pointer type was passed in. */
9790 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9792 /* Set the value. */
9793 tree result_quo
9794 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9795 build_int_cst (TREE_TYPE (arg_quo),
9796 integer_quo));
9797 TREE_SIDE_EFFECTS (result_quo) = 1;
9798 /* Combine the quo assignment with the rem. */
9799 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9800 result_quo, result_rem));
9805 return result;
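/* For example, a sketch of the folding above on constant arguments:

     int q;
     double r = remquo (5.0, 3.0, &q);

   folds to the compound expression "q = 2, -1.0", since with the
   quotient rounded to nearest, 5 == 2 * 3 - 1.  */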
9808 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9809 resulting value as a tree with type TYPE. The mpfr precision is
9810 set to the precision of TYPE. We assume that this mpfr function
9811 returns zero if the result could be calculated exactly within the
9812 requested precision. In addition, the integer pointer represented
9813 by ARG_SG will be dereferenced and set to the appropriate signgam
9814 (-1,1) value. */
9816 static tree
9817 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9819 tree result = NULL_TREE;
9821 STRIP_NOPS (arg);
9823 /* To proceed, MPFR must exactly represent the target floating point
9824 format, which only happens when the target base equals two. Also
9825 verify ARG is a constant and that ARG_SG is an int pointer. */
9826 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9827 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9828 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9829 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9831 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9833 /* In addition to NaN and Inf, the argument cannot be zero or a
9834 negative integer. */
9835 if (real_isfinite (ra)
9836 && ra->cl != rvc_zero
9837 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9839 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9840 const int prec = fmt->p;
9841 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9842 int inexact, sg;
9843 mpfr_t m;
9844 tree result_lg;
9846 mpfr_init2 (m, prec);
9847 mpfr_from_real (m, ra, GMP_RNDN);
9848 mpfr_clear_flags ();
9849 inexact = mpfr_lgamma (m, &sg, m, rnd);
9850 result_lg = do_mpfr_ckconv (m, type, inexact);
9851 mpfr_clear (m);
9852 if (result_lg)
9854 tree result_sg;
9856 /* Dereference the arg_sg pointer argument. */
9857 arg_sg = build_fold_indirect_ref (arg_sg);
9858 /* Assign the signgam value into *arg_sg. */
9859 result_sg = fold_build2 (MODIFY_EXPR,
9860 TREE_TYPE (arg_sg), arg_sg,
9861 build_int_cst (TREE_TYPE (arg_sg), sg));
9862 TREE_SIDE_EFFECTS (result_sg) = 1;
9863 /* Combine the signgam assignment with the lgamma result. */
9864 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9865 result_sg, result_lg));
9870 return result;
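/* For example, a sketch of the folding above on a constant argument:

     int sg;
     double l = lgamma_r (-0.5, &sg);

   folds to "sg = -1" combined with the constant log (2 * sqrt (pi)),
   because Gamma (-0.5) == -2 * sqrt (pi) is negative.  */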
9873 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9874 mpc function FUNC on it and return the resulting value as a tree
9875 with type TYPE. The mpfr precision is set to the precision of
9876 TYPE. We assume that function FUNC returns zero if the result
9877 could be calculated exactly within the requested precision. If
9878 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9879 in the arguments and/or results. */
9881 tree
9882 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9883 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9885 tree result = NULL_TREE;
9887 STRIP_NOPS (arg0);
9888 STRIP_NOPS (arg1);
9890 /* To proceed, MPFR must exactly represent the target floating point
9891 format, which only happens when the target base equals two. */
9892 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9893 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9894 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9895 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9896 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9898 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9899 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9900 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9901 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9903 if (do_nonfinite
9904 || (real_isfinite (re0) && real_isfinite (im0)
9905 && real_isfinite (re1) && real_isfinite (im1)))
9907 const struct real_format *const fmt =
9908 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9909 const int prec = fmt->p;
9910 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9911 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9912 int inexact;
9913 mpc_t m0, m1;
9915 mpc_init2 (m0, prec);
9916 mpc_init2 (m1, prec);
9917 mpfr_from_real (mpc_realref (m0), re0, rnd);
9918 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9919 mpfr_from_real (mpc_realref (m1), re1, rnd);
9920 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9921 mpfr_clear_flags ();
9922 inexact = func (m0, m0, m1, crnd);
9923 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9924 mpc_clear (m0);
9925 mpc_clear (m1);
9929 return result;
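/* For example, the folder above evaluates two-argument complex
   functions such as cpow on constant operands:

     cpow (2.0 + 0.0i, 2.0 + 0.0i)   folds to   4.0 + 0.0i

   computed by MPC at the precision of the result's component type.  */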
9932 /* A wrapper function for builtin folding that prevents warnings for
9933 "statement without effect" and the like, caused by removing the
9934 call node before the warning is generated. */
9936 tree
9937 fold_call_stmt (gcall *stmt, bool ignore)
9939 tree ret = NULL_TREE;
9940 tree fndecl = gimple_call_fndecl (stmt);
9941 location_t loc = gimple_location (stmt);
9942 if (fndecl
9943 && TREE_CODE (fndecl) == FUNCTION_DECL
9944 && DECL_BUILT_IN (fndecl)
9945 && !gimple_call_va_arg_pack_p (stmt))
9947 int nargs = gimple_call_num_args (stmt);
9948 tree *args = (nargs > 0
9949 ? gimple_call_arg_ptr (stmt, 0)
9950 : &error_mark_node);
9952 if (avoid_folding_inline_builtin (fndecl))
9953 return NULL_TREE;
9954 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9956 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9958 else
9960 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9961 if (ret)
9963 /* Propagate location information from original call to
9964 expansion of builtin. Otherwise things like
9965 maybe_emit_chk_warning, that operate on the expansion
9966 of a builtin, will use the wrong location information. */
9967 if (gimple_has_location (stmt))
9969 tree realret = ret;
9970 if (TREE_CODE (ret) == NOP_EXPR)
9971 realret = TREE_OPERAND (ret, 0);
9972 if (CAN_HAVE_LOCATION_P (realret)
9973 && !EXPR_HAS_LOCATION (realret))
9974 SET_EXPR_LOCATION (realret, loc);
9975 return realret;
9977 return ret;
9981 return NULL_TREE;
9984 /* Look up the function in builtin_decl that corresponds to DECL
9985 and set ASMSPEC as its user assembler name. DECL must be a
9986 function decl that declares a builtin. */
9988 void
9989 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9991 tree builtin;
9992 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9993 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9994 && asmspec != 0);
9996 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9997 set_user_assembler_name (builtin, asmspec);
9998 switch (DECL_FUNCTION_CODE (decl))
10000 case BUILT_IN_MEMCPY:
10001 init_block_move_fn (asmspec);
10002 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
10003 break;
10004 case BUILT_IN_MEMSET:
10005 init_block_clear_fn (asmspec);
10006 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
10007 break;
10008 case BUILT_IN_MEMMOVE:
10009 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
10010 break;
10011 case BUILT_IN_MEMCMP:
10012 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
10013 break;
10014 case BUILT_IN_ABORT:
10015 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
10016 break;
10017 case BUILT_IN_FFS:
10018 if (INT_TYPE_SIZE < BITS_PER_WORD)
10020 set_user_assembler_libfunc ("ffs", asmspec);
10021 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
10022 MODE_INT, 0), "ffs");
10024 break;
10025 default:
10026 break;
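/* For example, this is what makes

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   redirect both explicit memcpy calls and compiler-generated block
   moves to my_memcpy.  */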
10030 /* Return true if DECL is a builtin that expands to a constant or similarly
10031 simple code. */
10032 bool
10033 is_simple_builtin (tree decl)
10035 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10036 switch (DECL_FUNCTION_CODE (decl))
10038 /* Builtins that expand to constants. */
10039 case BUILT_IN_CONSTANT_P:
10040 case BUILT_IN_EXPECT:
10041 case BUILT_IN_OBJECT_SIZE:
10042 case BUILT_IN_UNREACHABLE:
10043 /* Simple register moves or loads from stack. */
10044 case BUILT_IN_ASSUME_ALIGNED:
10045 case BUILT_IN_RETURN_ADDRESS:
10046 case BUILT_IN_EXTRACT_RETURN_ADDR:
10047 case BUILT_IN_FROB_RETURN_ADDR:
10048 case BUILT_IN_RETURN:
10049 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10050 case BUILT_IN_FRAME_ADDRESS:
10051 case BUILT_IN_VA_END:
10052 case BUILT_IN_STACK_SAVE:
10053 case BUILT_IN_STACK_RESTORE:
10054 /* Exception state returns or moves registers around. */
10055 case BUILT_IN_EH_FILTER:
10056 case BUILT_IN_EH_POINTER:
10057 case BUILT_IN_EH_COPY_VALUES:
10058 return true;
10060 default:
10061 return false;
10064 return false;
10067 /* Return true if DECL is a builtin that is not expensive, i.e. one that
10068 is most probably expanded inline into reasonably simple code. This is a
10069 superset of is_simple_builtin. */
10070 bool
10071 is_inexpensive_builtin (tree decl)
10073 if (!decl)
10074 return false;
10075 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10076 return true;
10077 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10078 switch (DECL_FUNCTION_CODE (decl))
10080 case BUILT_IN_ABS:
10081 case BUILT_IN_ALLOCA:
10082 case BUILT_IN_ALLOCA_WITH_ALIGN:
10083 case BUILT_IN_BSWAP16:
10084 case BUILT_IN_BSWAP32:
10085 case BUILT_IN_BSWAP64:
10086 case BUILT_IN_CLZ:
10087 case BUILT_IN_CLZIMAX:
10088 case BUILT_IN_CLZL:
10089 case BUILT_IN_CLZLL:
10090 case BUILT_IN_CTZ:
10091 case BUILT_IN_CTZIMAX:
10092 case BUILT_IN_CTZL:
10093 case BUILT_IN_CTZLL:
10094 case BUILT_IN_FFS:
10095 case BUILT_IN_FFSIMAX:
10096 case BUILT_IN_FFSL:
10097 case BUILT_IN_FFSLL:
10098 case BUILT_IN_IMAXABS:
10099 case BUILT_IN_FINITE:
10100 case BUILT_IN_FINITEF:
10101 case BUILT_IN_FINITEL:
10102 case BUILT_IN_FINITED32:
10103 case BUILT_IN_FINITED64:
10104 case BUILT_IN_FINITED128:
10105 case BUILT_IN_FPCLASSIFY:
10106 case BUILT_IN_ISFINITE:
10107 case BUILT_IN_ISINF_SIGN:
10108 case BUILT_IN_ISINF:
10109 case BUILT_IN_ISINFF:
10110 case BUILT_IN_ISINFL:
10111 case BUILT_IN_ISINFD32:
10112 case BUILT_IN_ISINFD64:
10113 case BUILT_IN_ISINFD128:
10114 case BUILT_IN_ISNAN:
10115 case BUILT_IN_ISNANF:
10116 case BUILT_IN_ISNANL:
10117 case BUILT_IN_ISNAND32:
10118 case BUILT_IN_ISNAND64:
10119 case BUILT_IN_ISNAND128:
10120 case BUILT_IN_ISNORMAL:
10121 case BUILT_IN_ISGREATER:
10122 case BUILT_IN_ISGREATEREQUAL:
10123 case BUILT_IN_ISLESS:
10124 case BUILT_IN_ISLESSEQUAL:
10125 case BUILT_IN_ISLESSGREATER:
10126 case BUILT_IN_ISUNORDERED:
10127 case BUILT_IN_VA_ARG_PACK:
10128 case BUILT_IN_VA_ARG_PACK_LEN:
10129 case BUILT_IN_VA_COPY:
10130 case BUILT_IN_TRAP:
10131 case BUILT_IN_SAVEREGS:
10132 case BUILT_IN_POPCOUNTL:
10133 case BUILT_IN_POPCOUNTLL:
10134 case BUILT_IN_POPCOUNTIMAX:
10135 case BUILT_IN_POPCOUNT:
10136 case BUILT_IN_PARITYL:
10137 case BUILT_IN_PARITYLL:
10138 case BUILT_IN_PARITYIMAX:
10139 case BUILT_IN_PARITY:
10140 case BUILT_IN_LABS:
10141 case BUILT_IN_LLABS:
10142 case BUILT_IN_PREFETCH:
10143 case BUILT_IN_ACC_ON_DEVICE:
10144 return true;
10146 default:
10147 return is_simple_builtin (decl);
10150 return false;
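/* For example, these predicates let callers such as the inlining cost
   model treat calls like

     __builtin_constant_p (x)   or   __builtin_clz (x)

   as (nearly) free, while an arbitrary builtin call is costed as a
   normal function call.  */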