/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or
   (when Cilk Plus is enabled) names one of the Cilk runtime entry
   points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = ptr_bitmask & -ptr_bitmask;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

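/* Worked example: for a pointer known to be a 16-byte aligned base plus 4,
   get_pointer_alignment_1 stores *ALIGNP = 128 and *BITPOSP = 32 (both in
   bits), and get_pointer_alignment above reduces that to 32, i.e. the
   4-byte alignment that the trailing offset still guarantees.  */
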
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

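/* Illustrative folds, assuming constant arguments:

     "foobar" at offset 0	-> ssize_int (6)
     "foobar" at offset 3	-> ssize_int (3)
     "foo\0bar" at offset 4	-> ssize_int (3)

   With an unknown offset into "foobar" the length folds to 6 - offset;
   with an unknown offset into "foo\0bar" it does not fold at all.  */
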
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

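/* For example, on a little-endian target with 8-bit units,
   c_readstr ("abcd", SImode) yields the constant 0x64636261: the first
   string byte lands in the least significant byte of the mode.  */
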
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

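/* E.g. for an INTEGER_CST holding 'A' (65), this stores 65 through P and
   returns 0; it returns 1 when CST is not an INTEGER_CST or the value
   would not survive the round trip through the target and host char.  */
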
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

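/* At the source level this expands, e.g.,

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   where the argument is COUNT: 0 names the current frame and each
   increment follows the dynamic chain one frame up, as above.  */
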
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

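/* The resulting buffer layout, for reference (a sketch; everything from
   word 2 on is machine-dependent):

     buf[0]	frame value from targetm.builtin_setjmp_frame_value ()
     buf[1]	address of RECEIVER_LABEL
     buf[2..]	stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)  */
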
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

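/* The intended source-level pairing, for reference; do_work and
   handle_unwind stand in for arbitrary user code:

     if (__builtin_setjmp (buf) == 0)
       do_work ();
     else
       handle_unwind ();

   Control reaches the else arm via __builtin_longjmp (buf, 1); the
   second argument must be the constant 1, as asserted above.  */
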
/* Return true if more arguments remain in the iterator ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

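/* For example, the expander for __builtin_nonlocal_goto below checks its
   two pointer arguments with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   where the trailing VOID_TYPE demands exactly two arguments; a trailing
   0 would instead allow any further arguments.  */
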
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

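/* As an illustration, a source-level call such as

     __builtin_prefetch (&a[i + 16], 0, 3);

   arrives here with read/write flag 0 (read) and locality 3, the same
   values the optional arguments default to when omitted.  */
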
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

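/* For illustration, the three untyped-call builtins combine like this;
   the size argument (16 here) is target-specific and only a placeholder,
   and target_fn stands in for the function being forwarded to:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) target_fn, args, 16);
     __builtin_return (res);
*/
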
1666 /* Perform an untyped return. */
1668 static void
1669 expand_builtin_return (rtx result)
1671 int size, align, regno;
1672 machine_mode mode;
1673 rtx reg;
1674 rtx_insn *call_fusage = 0;
1676 result = convert_memory_address (Pmode, result);
1678 apply_result_size ();
1679 result = gen_rtx_MEM (BLKmode, result);
1681 if (targetm.have_untyped_return ())
1683 rtx vector = result_vector (0, result);
1684 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1685 emit_barrier ();
1686 return;
1689 /* Restore the return value and note that each value is used. */
1690 size = 0;
1691 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1692 if ((mode = apply_result_mode[regno]) != VOIDmode)
1694 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1695 if (size % align != 0)
1696 size = CEIL (size, align) * align;
1697 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1698 emit_move_insn (reg, adjust_address (result, mode, size));
1700 push_to_sequence (call_fusage);
1701 emit_use (reg);
1702 call_fusage = get_insns ();
1703 end_sequence ();
1704 size += GET_MODE_SIZE (mode);
1707 /* Put the USE insns before the return. */
1708 emit_insn (call_fusage);
1710 /* Return whatever value was restored by jumping directly to the end
1711 of the function. */
1712 expand_naked_return ();
1715 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1717 static enum type_class
1718 type_to_class (tree type)
1720 switch (TREE_CODE (type))
1722 case VOID_TYPE: return void_type_class;
1723 case INTEGER_TYPE: return integer_type_class;
1724 case ENUMERAL_TYPE: return enumeral_type_class;
1725 case BOOLEAN_TYPE: return boolean_type_class;
1726 case POINTER_TYPE: return pointer_type_class;
1727 case REFERENCE_TYPE: return reference_type_class;
1728 case OFFSET_TYPE: return offset_type_class;
1729 case REAL_TYPE: return real_type_class;
1730 case COMPLEX_TYPE: return complex_type_class;
1731 case FUNCTION_TYPE: return function_type_class;
1732 case METHOD_TYPE: return method_type_class;
1733 case RECORD_TYPE: return record_type_class;
1734 case UNION_TYPE:
1735 case QUAL_UNION_TYPE: return union_type_class;
1736 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1737 ? string_type_class : array_type_class);
1738 case LANG_TYPE: return lang_type_class;
1739 default: return no_type_class;
1743 /* Expand a call EXP to __builtin_classify_type. */
1745 static rtx
1746 expand_builtin_classify_type (tree exp)
1748 if (call_expr_nargs (exp))
1749 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1750 return GEN_INT (no_type_class);
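/* Illustration (not part of the original source): given the mapping in
   type_to_class, a C-level call such as

     __builtin_classify_type (1)          evaluates to integer_type_class
     __builtin_classify_type (1.0)        evaluates to real_type_class
     __builtin_classify_type ((void *) 0) evaluates to pointer_type_class

   while a call with no arguments folds to no_type_class.  */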
1753 /* This helper macro, meant to be used in mathfn_built_in below,
1754 determines which among a set of three builtin math functions is
1755 appropriate for a given type mode. The `F' and `L' cases are
1756 automatically generated from the `double' case. */
1757 #define CASE_MATHFN(MATHFN) \
1758 CASE_CFN_##MATHFN: \
1759 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1760 fcodel = BUILT_IN_##MATHFN##L ; break;
1761 /* Similar to above, but appends _R after any F/L suffix. */
1762 #define CASE_MATHFN_REENT(MATHFN) \
1763 case CFN_BUILT_IN_##MATHFN##_R: \
1764 case CFN_BUILT_IN_##MATHFN##F_R: \
1765 case CFN_BUILT_IN_##MATHFN##L_R: \
1766 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1767 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1769 /* Return a function equivalent to FN but operating on floating-point
1770 values of type TYPE, or END_BUILTINS if no such function exists.
1771 This is purely an operation on function codes; it does not guarantee
1772 that the target actually has an implementation of the function. */
1774 static built_in_function
1775 mathfn_built_in_2 (tree type, combined_fn fn)
1777 built_in_function fcode, fcodef, fcodel;
1779 switch (fn)
1781 CASE_MATHFN (ACOS)
1782 CASE_MATHFN (ACOSH)
1783 CASE_MATHFN (ASIN)
1784 CASE_MATHFN (ASINH)
1785 CASE_MATHFN (ATAN)
1786 CASE_MATHFN (ATAN2)
1787 CASE_MATHFN (ATANH)
1788 CASE_MATHFN (CBRT)
1789 CASE_MATHFN (CEIL)
1790 CASE_MATHFN (CEXPI)
1791 CASE_MATHFN (COPYSIGN)
1792 CASE_MATHFN (COS)
1793 CASE_MATHFN (COSH)
1794 CASE_MATHFN (DREM)
1795 CASE_MATHFN (ERF)
1796 CASE_MATHFN (ERFC)
1797 CASE_MATHFN (EXP)
1798 CASE_MATHFN (EXP10)
1799 CASE_MATHFN (EXP2)
1800 CASE_MATHFN (EXPM1)
1801 CASE_MATHFN (FABS)
1802 CASE_MATHFN (FDIM)
1803 CASE_MATHFN (FLOOR)
1804 CASE_MATHFN (FMA)
1805 CASE_MATHFN (FMAX)
1806 CASE_MATHFN (FMIN)
1807 CASE_MATHFN (FMOD)
1808 CASE_MATHFN (FREXP)
1809 CASE_MATHFN (GAMMA)
1810 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1811 CASE_MATHFN (HUGE_VAL)
1812 CASE_MATHFN (HYPOT)
1813 CASE_MATHFN (ILOGB)
1814 CASE_MATHFN (ICEIL)
1815 CASE_MATHFN (IFLOOR)
1816 CASE_MATHFN (INF)
1817 CASE_MATHFN (IRINT)
1818 CASE_MATHFN (IROUND)
1819 CASE_MATHFN (ISINF)
1820 CASE_MATHFN (J0)
1821 CASE_MATHFN (J1)
1822 CASE_MATHFN (JN)
1823 CASE_MATHFN (LCEIL)
1824 CASE_MATHFN (LDEXP)
1825 CASE_MATHFN (LFLOOR)
1826 CASE_MATHFN (LGAMMA)
1827 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1828 CASE_MATHFN (LLCEIL)
1829 CASE_MATHFN (LLFLOOR)
1830 CASE_MATHFN (LLRINT)
1831 CASE_MATHFN (LLROUND)
1832 CASE_MATHFN (LOG)
1833 CASE_MATHFN (LOG10)
1834 CASE_MATHFN (LOG1P)
1835 CASE_MATHFN (LOG2)
1836 CASE_MATHFN (LOGB)
1837 CASE_MATHFN (LRINT)
1838 CASE_MATHFN (LROUND)
1839 CASE_MATHFN (MODF)
1840 CASE_MATHFN (NAN)
1841 CASE_MATHFN (NANS)
1842 CASE_MATHFN (NEARBYINT)
1843 CASE_MATHFN (NEXTAFTER)
1844 CASE_MATHFN (NEXTTOWARD)
1845 CASE_MATHFN (POW)
1846 CASE_MATHFN (POWI)
1847 CASE_MATHFN (POW10)
1848 CASE_MATHFN (REMAINDER)
1849 CASE_MATHFN (REMQUO)
1850 CASE_MATHFN (RINT)
1851 CASE_MATHFN (ROUND)
1852 CASE_MATHFN (SCALB)
1853 CASE_MATHFN (SCALBLN)
1854 CASE_MATHFN (SCALBN)
1855 CASE_MATHFN (SIGNBIT)
1856 CASE_MATHFN (SIGNIFICAND)
1857 CASE_MATHFN (SIN)
1858 CASE_MATHFN (SINCOS)
1859 CASE_MATHFN (SINH)
1860 CASE_MATHFN (SQRT)
1861 CASE_MATHFN (TAN)
1862 CASE_MATHFN (TANH)
1863 CASE_MATHFN (TGAMMA)
1864 CASE_MATHFN (TRUNC)
1865 CASE_MATHFN (Y0)
1866 CASE_MATHFN (Y1)
1867 CASE_MATHFN (YN)
1869 default:
1870 return END_BUILTINS;
1873 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1874 return fcode;
1875 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1876 return fcodef;
1877 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1878 return fcodel;
1879 else
1880 return END_BUILTINS;
1883 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1884 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1885 otherwise use the explicit declaration. If we can't do the conversion,
1886 return null. */
1888 static tree
1889 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1891 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1892 if (fcode2 == END_BUILTINS)
1893 return NULL_TREE;
1895 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1896 return NULL_TREE;
1898 return builtin_decl_explicit (fcode2);
1901 /* Like mathfn_built_in_1, but always use the implicit array. */
1903 tree
1904 mathfn_built_in (tree type, combined_fn fn)
1906 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1909 /* Like mathfn_built_in_1, but take a built_in_function and
1910 always use the implicit array. */
1912 tree
1913 mathfn_built_in (tree type, enum built_in_function fn)
1915 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
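/* Illustration (not part of the original source): assuming sinf is
   implicitly available on the target's C library,

     mathfn_built_in (float_type_node, BUILT_IN_SIN)

   returns the declaration of BUILT_IN_SINF, while for a type with no
   matching variant it returns NULL_TREE.  */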
1918 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1919 return its code, otherwise return IFN_LAST. Note that this function
1920 only tests whether the function is defined in internal-fn.def, not whether
1921 it is actually available on the target. */
1923 internal_fn
1924 associated_internal_fn (tree fndecl)
1926 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1927 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1928 switch (DECL_FUNCTION_CODE (fndecl))
1930 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1931 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1932 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1933 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1934 #include "internal-fn.def"
1936 CASE_FLT_FN (BUILT_IN_POW10):
1937 return IFN_EXP10;
1939 CASE_FLT_FN (BUILT_IN_DREM):
1940 return IFN_REMAINDER;
1942 CASE_FLT_FN (BUILT_IN_SCALBN):
1943 CASE_FLT_FN (BUILT_IN_SCALBLN):
1944 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1945 return IFN_LDEXP;
1946 return IFN_LAST;
1948 default:
1949 return IFN_LAST;
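/* Illustration (not part of the original source): since internal-fn.def
   defines a direct internal function for SQRT, a fndecl for sqrtf maps
   to IFN_SQRT via the generated cases, while the explicit cases above
   map e.g. pow10 to IFN_EXP10 and drem to IFN_REMAINDER.  */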
1953 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1954 on the current target by a call to an internal function, return the
1955 code of that internal function, otherwise return IFN_LAST. The caller
1956 is responsible for ensuring that any side-effects of the built-in
1957 call are dealt with correctly. E.g. if CALL sets errno, the caller
1958 must decide that the errno result isn't needed or make it available
1959 in some other way. */
1961 internal_fn
1962 replacement_internal_fn (gcall *call)
1964 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1966 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1967 if (ifn != IFN_LAST)
1969 tree_pair types = direct_internal_fn_types (ifn, call);
1970 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1971 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1972 return ifn;
1975 return IFN_LAST;
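/* Sketch of a typical caller (not part of the original source; the
   unary-call shape is illustrative, and the caller must handle the lhs
   and any errno side-effects itself):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       {
         gcall *new_call
           = gimple_build_call_internal (ifn, 1, gimple_call_arg (call, 0));
         gimple_call_set_lhs (new_call, gimple_call_lhs (call));
         then substitute NEW_CALL for CALL in the statement stream
       }  */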
1978 /* Expand a call to the builtin ternary math functions (fma).
1979 Return NULL_RTX if a normal call should be emitted rather than expanding the
1980 function in-line. EXP is the expression that is a call to the builtin
1981 function; if convenient, the result should be placed in TARGET.
1982 SUBTARGET may be used as the target for computing one of EXP's
1983 operands. */
1985 static rtx
1986 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1988 optab builtin_optab;
1989 rtx op0, op1, op2, result;
1990 rtx_insn *insns;
1991 tree fndecl = get_callee_fndecl (exp);
1992 tree arg0, arg1, arg2;
1993 machine_mode mode;
1995 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1996 return NULL_RTX;
1998 arg0 = CALL_EXPR_ARG (exp, 0);
1999 arg1 = CALL_EXPR_ARG (exp, 1);
2000 arg2 = CALL_EXPR_ARG (exp, 2);
2002 switch (DECL_FUNCTION_CODE (fndecl))
2004 CASE_FLT_FN (BUILT_IN_FMA):
2005 builtin_optab = fma_optab; break;
2006 default:
2007 gcc_unreachable ();
2010 /* Make a suitable register to place result in. */
2011 mode = TYPE_MODE (TREE_TYPE (exp));
2013 /* Before working hard, check whether the instruction is available. */
2014 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2015 return NULL_RTX;
2017 result = gen_reg_rtx (mode);
2019 /* Always stabilize the argument list. */
2020 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2021 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2022 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2024 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2025 op1 = expand_normal (arg1);
2026 op2 = expand_normal (arg2);
2028 start_sequence ();
2030 /* Compute into RESULT.
2031 Set RESULT to wherever the result comes back. */
2032 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2033 result, 0);
2035 /* If we were unable to expand via the builtin, stop the sequence
2036 (without outputting the insns) and call to the library function
2037 with the stabilized argument list. */
2038 if (result == 0)
2040 end_sequence ();
2041 return expand_call (exp, target, target == const0_rtx);
2044 /* Output the entire sequence. */
2045 insns = get_insns ();
2046 end_sequence ();
2047 emit_insn (insns);
2049 return result;
2052 /* Expand a call to the builtin sin and cos math functions.
2053 Return NULL_RTX if a normal call should be emitted rather than expanding the
2054 function in-line. EXP is the expression that is a call to the builtin
2055 function; if convenient, the result should be placed in TARGET.
2056 SUBTARGET may be used as the target for computing one of EXP's
2057 operands. */
2059 static rtx
2060 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2062 optab builtin_optab;
2063 rtx op0;
2064 rtx_insn *insns;
2065 tree fndecl = get_callee_fndecl (exp);
2066 machine_mode mode;
2067 tree arg;
2069 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2070 return NULL_RTX;
2072 arg = CALL_EXPR_ARG (exp, 0);
2074 switch (DECL_FUNCTION_CODE (fndecl))
2076 CASE_FLT_FN (BUILT_IN_SIN):
2077 CASE_FLT_FN (BUILT_IN_COS):
2078 builtin_optab = sincos_optab; break;
2079 default:
2080 gcc_unreachable ();
2083 /* Make a suitable register to place result in. */
2084 mode = TYPE_MODE (TREE_TYPE (exp));
2086 /* Check if the sincos insn is available; otherwise fall back
2087 to the sin or cos insn. */
2088 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2089 switch (DECL_FUNCTION_CODE (fndecl))
2091 CASE_FLT_FN (BUILT_IN_SIN):
2092 builtin_optab = sin_optab; break;
2093 CASE_FLT_FN (BUILT_IN_COS):
2094 builtin_optab = cos_optab; break;
2095 default:
2096 gcc_unreachable ();
2099 /* Before working hard, check whether the instruction is available. */
2100 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2102 rtx result = gen_reg_rtx (mode);
2104 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2105 need to expand the argument again. This way, we will not perform
2106 side-effects more than once. */
2107 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2109 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2111 start_sequence ();
2113 /* Compute into RESULT.
2114 Set RESULT to wherever the result comes back. */
2115 if (builtin_optab == sincos_optab)
2117 int ok;
2119 switch (DECL_FUNCTION_CODE (fndecl))
2121 CASE_FLT_FN (BUILT_IN_SIN):
2122 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2123 break;
2124 CASE_FLT_FN (BUILT_IN_COS):
2125 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2126 break;
2127 default:
2128 gcc_unreachable ();
2130 gcc_assert (ok);
2132 else
2133 result = expand_unop (mode, builtin_optab, op0, result, 0);
2135 if (result != 0)
2137 /* Output the entire sequence. */
2138 insns = get_insns ();
2139 end_sequence ();
2140 emit_insn (insns);
2141 return result;
2144 /* If we were unable to expand via the builtin, stop the sequence
2145 (without outputting the insns) and call to the library function
2146 with the stabilized argument list. */
2147 end_sequence ();
2150 return expand_call (exp, target, target == const0_rtx);
2153 /* Given an interclass math builtin decl FNDECL and its argument ARG
2154 return an RTL instruction code that implements the functionality.
2155 If that isn't possible or available return CODE_FOR_nothing. */
2157 static enum insn_code
2158 interclass_mathfn_icode (tree arg, tree fndecl)
2160 bool errno_set = false;
2161 optab builtin_optab = unknown_optab;
2162 machine_mode mode;
2164 switch (DECL_FUNCTION_CODE (fndecl))
2166 CASE_FLT_FN (BUILT_IN_ILOGB):
2167 errno_set = true; builtin_optab = ilogb_optab; break;
2168 CASE_FLT_FN (BUILT_IN_ISINF):
2169 builtin_optab = isinf_optab; break;
2170 case BUILT_IN_ISNORMAL:
2171 case BUILT_IN_ISFINITE:
2172 CASE_FLT_FN (BUILT_IN_FINITE):
2173 case BUILT_IN_FINITED32:
2174 case BUILT_IN_FINITED64:
2175 case BUILT_IN_FINITED128:
2176 case BUILT_IN_ISINFD32:
2177 case BUILT_IN_ISINFD64:
2178 case BUILT_IN_ISINFD128:
2179 /* These builtins have no optabs (yet). */
2180 break;
2181 default:
2182 gcc_unreachable ();
2185 /* There's no easy way to detect the case we need to set EDOM. */
2186 if (flag_errno_math && errno_set)
2187 return CODE_FOR_nothing;
2189 /* Optab mode depends on the mode of the input argument. */
2190 mode = TYPE_MODE (TREE_TYPE (arg));
2192 if (builtin_optab)
2193 return optab_handler (builtin_optab, mode);
2194 return CODE_FOR_nothing;
2197 /* Expand a call to one of the builtin math functions that operate on
2198 floating point argument and output an integer result (ilogb, isinf,
2199 isnan, etc).
2200 Return 0 if a normal call should be emitted rather than expanding the
2201 function in-line. EXP is the expression that is a call to the builtin
2202 function; if convenient, the result should be placed in TARGET. */
2204 static rtx
2205 expand_builtin_interclass_mathfn (tree exp, rtx target)
2207 enum insn_code icode = CODE_FOR_nothing;
2208 rtx op0;
2209 tree fndecl = get_callee_fndecl (exp);
2210 machine_mode mode;
2211 tree arg;
2213 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2214 return NULL_RTX;
2216 arg = CALL_EXPR_ARG (exp, 0);
2217 icode = interclass_mathfn_icode (arg, fndecl);
2218 mode = TYPE_MODE (TREE_TYPE (arg));
2220 if (icode != CODE_FOR_nothing)
2222 struct expand_operand ops[1];
2223 rtx_insn *last = get_last_insn ();
2224 tree orig_arg = arg;
2226 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2227 need to expand the argument again. This way, we will not perform
2228 side-effects more than once. */
2229 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2231 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2233 if (mode != GET_MODE (op0))
2234 op0 = convert_to_mode (mode, op0, 0);
2236 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2237 if (maybe_legitimize_operands (icode, 0, 1, ops)
2238 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2239 return ops[0].value;
2241 delete_insns_since (last);
2242 CALL_EXPR_ARG (exp, 0) = orig_arg;
2245 return NULL_RTX;
2248 /* Expand a call to the builtin sincos math function.
2249 Return NULL_RTX if a normal call should be emitted rather than expanding the
2250 function in-line. EXP is the expression that is a call to the builtin
2251 function. */
2253 static rtx
2254 expand_builtin_sincos (tree exp)
2256 rtx op0, op1, op2, target1, target2;
2257 machine_mode mode;
2258 tree arg, sinp, cosp;
2259 int result;
2260 location_t loc = EXPR_LOCATION (exp);
2261 tree alias_type, alias_off;
2263 if (!validate_arglist (exp, REAL_TYPE,
2264 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2267 arg = CALL_EXPR_ARG (exp, 0);
2268 sinp = CALL_EXPR_ARG (exp, 1);
2269 cosp = CALL_EXPR_ARG (exp, 2);
2271 /* Make a suitable register to place result in. */
2272 mode = TYPE_MODE (TREE_TYPE (arg));
2274 /* Check if sincos insn is available, otherwise emit the call. */
2275 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2276 return NULL_RTX;
2278 target1 = gen_reg_rtx (mode);
2279 target2 = gen_reg_rtx (mode);
2281 op0 = expand_normal (arg);
2282 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2283 alias_off = build_int_cst (alias_type, 0);
2284 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2285 sinp, alias_off));
2286 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2287 cosp, alias_off));
2289 /* Compute into target1 and target2.
2290 Set TARGET to wherever the result comes back. */
2291 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2292 gcc_assert (result);
2294 /* Move target1 and target2 to the memory locations indicated
2295 by op1 and op2. */
2296 emit_move_insn (op1, target1);
2297 emit_move_insn (op2, target2);
2299 return const0_rtx;
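/* Illustration (not part of the original source): for

     double s, c;
     __builtin_sincos (x, &s, &c);

   with a sincos pattern for DFmode, the expansion above computes both
   results in one twoval insn and stores them through the two pointer
   arguments, returning const0_rtx; without the pattern, the NULL_RTX
   return makes the caller emit a normal sincos library call.  */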
2302 /* Expand a call to the internal cexpi builtin to the sincos math function.
2303 EXP is the expression that is a call to the builtin function; if convenient,
2304 the result should be placed in TARGET. */
2306 static rtx
2307 expand_builtin_cexpi (tree exp, rtx target)
2309 tree fndecl = get_callee_fndecl (exp);
2310 tree arg, type;
2311 machine_mode mode;
2312 rtx op0, op1, op2;
2313 location_t loc = EXPR_LOCATION (exp);
2315 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2316 return NULL_RTX;
2318 arg = CALL_EXPR_ARG (exp, 0);
2319 type = TREE_TYPE (arg);
2320 mode = TYPE_MODE (TREE_TYPE (arg));
2322 /* Try expanding via a sincos optab, fall back to emitting a libcall
2323 to sincos or cexp. We can be sure sincos or cexp exists, because
2324 cexpi is only generated from those calls or when either is available. */
2325 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2327 op1 = gen_reg_rtx (mode);
2328 op2 = gen_reg_rtx (mode);
2330 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2332 /* Compute into op1 and op2. */
2333 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2335 else if (targetm.libc_has_function (function_sincos))
2337 tree call, fn = NULL_TREE;
2338 tree top1, top2;
2339 rtx op1a, op2a;
2341 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2342 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2343 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2344 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2345 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2346 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2347 else
2348 gcc_unreachable ();
2350 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2351 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2352 op1a = copy_addr_to_reg (XEXP (op1, 0));
2353 op2a = copy_addr_to_reg (XEXP (op2, 0));
2354 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2355 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2357 /* Make sure not to fold the sincos call again. */
2358 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2359 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2360 call, 3, arg, top1, top2));
2362 else
2364 tree call, fn = NULL_TREE, narg;
2365 tree ctype = build_complex_type (type);
2367 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2368 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2369 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2370 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2371 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2372 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2373 else
2374 gcc_unreachable ();
2376 /* If we don't have a decl for cexp create one. This is the
2377 friendliest fallback if the user calls __builtin_cexpi
2378 without full target C99 function support. */
2379 if (fn == NULL_TREE)
2381 tree fntype;
2382 const char *name = NULL;
2384 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2385 name = "cexpf";
2386 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2387 name = "cexp";
2388 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2389 name = "cexpl";
2391 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2392 fn = build_fn_decl (name, fntype);
2395 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2396 build_real (type, dconst0), arg);
2398 /* Make sure not to fold the cexp call again. */
2399 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2400 return expand_expr (build_call_nary (ctype, call, 1, narg),
2401 target, VOIDmode, EXPAND_NORMAL);
2404 /* Now build the proper return type. */
2405 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2406 make_tree (TREE_TYPE (arg), op2),
2407 make_tree (TREE_TYPE (arg), op1)),
2408 target, VOIDmode, EXPAND_NORMAL);
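/* Illustration (not part of the original source): __builtin_cexpi (x)
   computes cos (x) + i*sin (x).  The tiers above lower it, in order of
   preference, to:

     1. a sincos optab insn producing both parts in registers;
     2. a call to sincos (x, &s, &c) through stack temporaries;
     3. a call to cexp on the complex value 0.0 + x*i, built with
        COMPLEX_EXPR.

   The final COMPLEX_EXPR then reassembles the real and imaginary
   parts into the return value.  */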
2411 /* Conveniently construct a function call expression. FNDECL names the
2412 function to be called, N is the number of arguments, and the "..."
2413 parameters are the argument expressions. Unlike build_call_expr
2414 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2416 static tree
2417 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2419 va_list ap;
2420 tree fntype = TREE_TYPE (fndecl);
2421 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2423 va_start (ap, n);
2424 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2425 va_end (ap);
2426 SET_EXPR_LOCATION (fn, loc);
2427 return fn;
2430 /* Expand a call to one of the builtin rounding functions gcc defines
2431 as an extension (lfloor and lceil). As these are gcc extensions we
2432 do not need to worry about setting errno to EDOM.
2433 If expanding via optab fails, lower expression to (int)(floor(x)).
2434 EXP is the expression that is a call to the builtin function;
2435 if convenient, the result should be placed in TARGET. */
2437 static rtx
2438 expand_builtin_int_roundingfn (tree exp, rtx target)
2440 convert_optab builtin_optab;
2441 rtx op0, tmp;
2442 rtx_insn *insns;
2443 tree fndecl = get_callee_fndecl (exp);
2444 enum built_in_function fallback_fn;
2445 tree fallback_fndecl;
2446 machine_mode mode;
2447 tree arg;
2449 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2450 gcc_unreachable ();
2452 arg = CALL_EXPR_ARG (exp, 0);
2454 switch (DECL_FUNCTION_CODE (fndecl))
2456 CASE_FLT_FN (BUILT_IN_ICEIL):
2457 CASE_FLT_FN (BUILT_IN_LCEIL):
2458 CASE_FLT_FN (BUILT_IN_LLCEIL):
2459 builtin_optab = lceil_optab;
2460 fallback_fn = BUILT_IN_CEIL;
2461 break;
2463 CASE_FLT_FN (BUILT_IN_IFLOOR):
2464 CASE_FLT_FN (BUILT_IN_LFLOOR):
2465 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2466 builtin_optab = lfloor_optab;
2467 fallback_fn = BUILT_IN_FLOOR;
2468 break;
2470 default:
2471 gcc_unreachable ();
2474 /* Make a suitable register to place result in. */
2475 mode = TYPE_MODE (TREE_TYPE (exp));
2477 target = gen_reg_rtx (mode);
2479 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2480 need to expand the argument again. This way, we will not perform
2481 side-effects more than once. */
2482 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2484 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2486 start_sequence ();
2488 /* Compute into TARGET. */
2489 if (expand_sfix_optab (target, op0, builtin_optab))
2491 /* Output the entire sequence. */
2492 insns = get_insns ();
2493 end_sequence ();
2494 emit_insn (insns);
2495 return target;
2498 /* If we were unable to expand via the builtin, stop the sequence
2499 (without outputting the insns). */
2500 end_sequence ();
2502 /* Fall back to floating point rounding optab. */
2503 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2505 /* For non-C99 targets we may end up without a fallback fndecl here
2506 if the user called __builtin_lfloor directly. In this case emit
2507 a call to the floor/ceil variants nevertheless. This should result
2508 in the best user experience on targets without full C99 support. */
2509 if (fallback_fndecl == NULL_TREE)
2511 tree fntype;
2512 const char *name = NULL;
2514 switch (DECL_FUNCTION_CODE (fndecl))
2516 case BUILT_IN_ICEIL:
2517 case BUILT_IN_LCEIL:
2518 case BUILT_IN_LLCEIL:
2519 name = "ceil";
2520 break;
2521 case BUILT_IN_ICEILF:
2522 case BUILT_IN_LCEILF:
2523 case BUILT_IN_LLCEILF:
2524 name = "ceilf";
2525 break;
2526 case BUILT_IN_ICEILL:
2527 case BUILT_IN_LCEILL:
2528 case BUILT_IN_LLCEILL:
2529 name = "ceill";
2530 break;
2531 case BUILT_IN_IFLOOR:
2532 case BUILT_IN_LFLOOR:
2533 case BUILT_IN_LLFLOOR:
2534 name = "floor";
2535 break;
2536 case BUILT_IN_IFLOORF:
2537 case BUILT_IN_LFLOORF:
2538 case BUILT_IN_LLFLOORF:
2539 name = "floorf";
2540 break;
2541 case BUILT_IN_IFLOORL:
2542 case BUILT_IN_LFLOORL:
2543 case BUILT_IN_LLFLOORL:
2544 name = "floorl";
2545 break;
2546 default:
2547 gcc_unreachable ();
2550 fntype = build_function_type_list (TREE_TYPE (arg),
2551 TREE_TYPE (arg), NULL_TREE);
2552 fallback_fndecl = build_fn_decl (name, fntype);
2555 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2557 tmp = expand_normal (exp);
2558 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2560 /* Truncate the result of floating point optab to integer
2561 via expand_fix (). */
2562 target = gen_reg_rtx (mode);
2563 expand_fix (target, tmp, 0);
2565 return target;
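/* Illustration (not part of the original source): __builtin_lfloor (x)
   first tries the lfloor optab; when that fails, the code above emits
   roughly the equivalent of

     (long) floor (x)

   by calling the floor variant (declared by hand on targets without
   full C99 support) and truncating the result with expand_fix.  */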
2568 /* Expand a call to one of the builtin math functions doing integer
2569 conversion (lrint).
2570 Return 0 if a normal call should be emitted rather than expanding the
2571 function in-line. EXP is the expression that is a call to the builtin
2572 function; if convenient, the result should be placed in TARGET. */
2574 static rtx
2575 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2577 convert_optab builtin_optab;
2578 rtx op0;
2579 rtx_insn *insns;
2580 tree fndecl = get_callee_fndecl (exp);
2581 tree arg;
2582 machine_mode mode;
2583 enum built_in_function fallback_fn = BUILT_IN_NONE;
2585 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2586 gcc_unreachable ();
2588 arg = CALL_EXPR_ARG (exp, 0);
2590 switch (DECL_FUNCTION_CODE (fndecl))
2592 CASE_FLT_FN (BUILT_IN_IRINT):
2593 fallback_fn = BUILT_IN_LRINT;
2594 /* FALLTHRU */
2595 CASE_FLT_FN (BUILT_IN_LRINT):
2596 CASE_FLT_FN (BUILT_IN_LLRINT):
2597 builtin_optab = lrint_optab;
2598 break;
2600 CASE_FLT_FN (BUILT_IN_IROUND):
2601 fallback_fn = BUILT_IN_LROUND;
2602 /* FALLTHRU */
2603 CASE_FLT_FN (BUILT_IN_LROUND):
2604 CASE_FLT_FN (BUILT_IN_LLROUND):
2605 builtin_optab = lround_optab;
2606 break;
2608 default:
2609 gcc_unreachable ();
2612 /* There's no easy way to detect the case we need to set EDOM. */
2613 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2614 return NULL_RTX;
2616 /* Make a suitable register to place result in. */
2617 mode = TYPE_MODE (TREE_TYPE (exp));
2619 /* Expand inline only when errno need not be set, since we cannot detect the EDOM case. */
2620 if (!flag_errno_math)
2622 rtx result = gen_reg_rtx (mode);
2624 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2625 need to expand the argument again. This way, we will not perform
2626 side-effects more than once. */
2627 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2629 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2631 start_sequence ();
2633 if (expand_sfix_optab (result, op0, builtin_optab))
2635 /* Output the entire sequence. */
2636 insns = get_insns ();
2637 end_sequence ();
2638 emit_insn (insns);
2639 return result;
2642 /* If we were unable to expand via the builtin, stop the sequence
2643 (without outputting the insns) and call to the library function
2644 with the stabilized argument list. */
2645 end_sequence ();
2648 if (fallback_fn != BUILT_IN_NONE)
2650 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2651 targets, (int) round (x) should never be transformed into
2652 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2653 a call to lround in the hope that the target provides at least some
2654 C99 functions. This should result in the best user experience on
2655 targets without full C99 support. */
2656 tree fallback_fndecl = mathfn_built_in_1
2657 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2659 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2660 fallback_fndecl, 1, arg);
2662 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2663 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2664 return convert_to_mode (mode, target, 0);
2667 return expand_call (exp, target, target == const0_rtx);
2670 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2671 a normal call should be emitted rather than expanding the function
2672 in-line. EXP is the expression that is a call to the builtin
2673 function; if convenient, the result should be placed in TARGET. */
2675 static rtx
2676 expand_builtin_powi (tree exp, rtx target)
2678 tree arg0, arg1;
2679 rtx op0, op1;
2680 machine_mode mode;
2681 machine_mode mode2;
2683 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2684 return NULL_RTX;
2686 arg0 = CALL_EXPR_ARG (exp, 0);
2687 arg1 = CALL_EXPR_ARG (exp, 1);
2688 mode = TYPE_MODE (TREE_TYPE (exp));
2690 /* Emit a libcall to libgcc. */
2692 /* Mode of the 2nd argument must match that of an int. */
2693 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2695 if (target == NULL_RTX)
2696 target = gen_reg_rtx (mode);
2698 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2699 if (GET_MODE (op0) != mode)
2700 op0 = convert_to_mode (mode, op0, 0);
2701 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2702 if (GET_MODE (op1) != mode2)
2703 op1 = convert_to_mode (mode2, op1, 0);
2705 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2706 target, LCT_CONST, mode, 2,
2707 op0, mode, op1, mode2);
2709 return target;
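/* Illustration (not part of the original source): __builtin_powi is
   always expanded here as a libcall, e.g. for DFmode something like

     target = __powidf2 (x, n);

   where __powidf2 names libgcc's powi helper for double (the exact
   symbol is whatever optab_libfunc (powi_optab, mode) returns).  */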
2712 /* Expand expression EXP which is a call to the strlen builtin. Return
2713 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2714 try to get the result in TARGET, if convenient. */
2716 static rtx
2717 expand_builtin_strlen (tree exp, rtx target,
2718 machine_mode target_mode)
2720 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2721 return NULL_RTX;
2722 else
2724 struct expand_operand ops[4];
2725 rtx pat;
2726 tree len;
2727 tree src = CALL_EXPR_ARG (exp, 0);
2728 rtx src_reg;
2729 rtx_insn *before_strlen;
2730 machine_mode insn_mode = target_mode;
2731 enum insn_code icode = CODE_FOR_nothing;
2732 unsigned int align;
2734 /* If the length can be computed at compile-time, return it. */
2735 len = c_strlen (src, 0);
2736 if (len)
2737 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2739 /* If the length can be computed at compile-time and is a constant
2740 integer, but there are side-effects in src, evaluate
2741 src for side-effects, then return len.
2742 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2743 can be optimized into: i++; x = 3; */
2744 len = c_strlen (src, 1);
2745 if (len && TREE_CODE (len) == INTEGER_CST)
2747 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2748 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2751 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2753 /* If SRC is not a pointer type, don't do this operation inline. */
2754 if (align == 0)
2755 return NULL_RTX;
2757 /* Bail out if we can't compute strlen in the right mode. */
2758 while (insn_mode != VOIDmode)
2760 icode = optab_handler (strlen_optab, insn_mode);
2761 if (icode != CODE_FOR_nothing)
2762 break;
2764 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2766 if (insn_mode == VOIDmode)
2767 return NULL_RTX;
2769 /* Make a place to hold the source address. We will not expand
2770 the actual source until we are sure that the expansion will
2771 not fail -- there are trees that cannot be expanded twice. */
2772 src_reg = gen_reg_rtx (Pmode);
2774 /* Mark the beginning of the strlen sequence so we can emit the
2775 source operand later. */
2776 before_strlen = get_last_insn ();
2778 create_output_operand (&ops[0], target, insn_mode);
2779 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2780 create_integer_operand (&ops[2], 0);
2781 create_integer_operand (&ops[3], align);
2782 if (!maybe_expand_insn (icode, 4, ops))
2783 return NULL_RTX;
2785 /* Now that we are assured of success, expand the source. */
2786 start_sequence ();
2787 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2788 if (pat != src_reg)
2790 #ifdef POINTERS_EXTEND_UNSIGNED
2791 if (GET_MODE (pat) != Pmode)
2792 pat = convert_to_mode (Pmode, pat,
2793 POINTERS_EXTEND_UNSIGNED);
2794 #endif
2795 emit_move_insn (src_reg, pat);
2797 pat = get_insns ();
2798 end_sequence ();
2800 if (before_strlen)
2801 emit_insn_after (pat, before_strlen);
2802 else
2803 emit_insn_before (pat, get_insns ());
2805 /* Return the value in the proper mode for this function. */
2806 if (GET_MODE (ops[0].value) == target_mode)
2807 target = ops[0].value;
2808 else if (target != 0)
2809 convert_move (target, ops[0].value, 0);
2810 else
2811 target = convert_to_mode (target_mode, ops[0].value, 0);
2813 return target;
2817 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2818 bytes from constant string DATA + OFFSET and return it as target
2819 constant. */
2821 static rtx
2822 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2823 machine_mode mode)
2825 const char *str = (const char *) data;
2827 gcc_assert (offset >= 0
2828 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2829 <= strlen (str) + 1));
2831 return c_readstr (str + offset, mode);
2834 /* LEN specifies the length of the block of a memcpy/memset operation.
2835 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2836 In some cases we can make a very likely guess about the max size, in
2837 which case we set it into PROBABLE_MAX_SIZE. */
2839 static void
2840 determine_block_size (tree len, rtx len_rtx,
2841 unsigned HOST_WIDE_INT *min_size,
2842 unsigned HOST_WIDE_INT *max_size,
2843 unsigned HOST_WIDE_INT *probable_max_size)
2845 if (CONST_INT_P (len_rtx))
2847 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2848 return;
2850 else
2852 wide_int min, max;
2853 enum value_range_type range_type = VR_UNDEFINED;
2855 /* Determine bounds from the type. */
2856 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2857 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2858 else
2859 *min_size = 0;
2860 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2861 *probable_max_size = *max_size
2862 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2863 else
2864 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2866 if (TREE_CODE (len) == SSA_NAME)
2867 range_type = get_range_info (len, &min, &max);
2868 if (range_type == VR_RANGE)
2870 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2871 *min_size = min.to_uhwi ();
2872 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2873 *probable_max_size = *max_size = max.to_uhwi ();
2875 else if (range_type == VR_ANTI_RANGE)
2877 /* An anti range 0...N lets us determine a minimal size of N+1. */
2878 if (min == 0)
2880 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2881 *min_size = max.to_uhwi () + 1;
2883 /* Code like
2885 int n;
2886 if (n < 100)
2887 memcpy (a, b, n)
2889 produces an anti range allowing negative values of N. We can
2890 still use the information and guess that N is not negative.
2891 */
2892 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2893 *probable_max_size = min.to_uhwi () - 1;
2896 gcc_checking_assert (*max_size <=
2897 (unsigned HOST_WIDE_INT)
2898 GET_MODE_MASK (GET_MODE (len_rtx)));
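/* Worked example (not part of the original source): if LEN is an
   SSA_NAME with value range [16, 64], the code above yields

     *min_size = 16, *max_size = *probable_max_size = 64;

   for an anti range ~[0, 0] (i.e. LEN != 0) it only raises *min_size
   to 1, leaving the maxima at the bounds derived from LEN's type.  */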
2901 /* Helper function to do the actual work for expand_builtin_memcpy. */
2903 static rtx
2904 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2906 const char *src_str;
2907 unsigned int src_align = get_pointer_alignment (src);
2908 unsigned int dest_align = get_pointer_alignment (dest);
2909 rtx dest_mem, src_mem, dest_addr, len_rtx;
2910 HOST_WIDE_INT expected_size = -1;
2911 unsigned int expected_align = 0;
2912 unsigned HOST_WIDE_INT min_size;
2913 unsigned HOST_WIDE_INT max_size;
2914 unsigned HOST_WIDE_INT probable_max_size;
2916 /* If DEST is not a pointer type, call the normal function. */
2917 if (dest_align == 0)
2918 return NULL_RTX;
2920 /* If either SRC is not a pointer type, don't do this
2921 operation in-line. */
2922 if (src_align == 0)
2923 return NULL_RTX;
2925 if (currently_expanding_gimple_stmt)
2926 stringop_block_profile (currently_expanding_gimple_stmt,
2927 &expected_align, &expected_size);
2929 if (expected_align < dest_align)
2930 expected_align = dest_align;
2931 dest_mem = get_memory_rtx (dest, len);
2932 set_mem_align (dest_mem, dest_align);
2933 len_rtx = expand_normal (len);
2934 determine_block_size (len, len_rtx, &min_size, &max_size,
2935 &probable_max_size);
2936 src_str = c_getstr (src);
2938 /* If SRC is a string constant and block move would be done
2939 by pieces, we can avoid loading the string from memory
2940 and only store the computed constants. */
2941 if (src_str
2942 && CONST_INT_P (len_rtx)
2943 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2944 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2945 CONST_CAST (char *, src_str),
2946 dest_align, false))
2948 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2949 builtin_memcpy_read_str,
2950 CONST_CAST (char *, src_str),
2951 dest_align, false, 0);
2952 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2953 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2954 return dest_mem;
2957 src_mem = get_memory_rtx (src, len);
2958 set_mem_align (src_mem, src_align);
2960 /* Copy word part most expediently. */
2961 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2962 CALL_EXPR_TAILCALL (exp)
2963 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2964 expected_align, expected_size,
2965 min_size, max_size, probable_max_size);
2967 if (dest_addr == 0)
2969 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2970 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2973 return dest_addr;
2976 /* Expand a call EXP to the memcpy builtin.
2977 Return NULL_RTX if we failed; the caller should emit a normal call,
2978 otherwise try to get the result in TARGET, if convenient (and in
2979 mode MODE if that's convenient). */
2981 static rtx
2982 expand_builtin_memcpy (tree exp, rtx target)
2984 if (!validate_arglist (exp,
2985 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2986 return NULL_RTX;
2987 else
2989 tree dest = CALL_EXPR_ARG (exp, 0);
2990 tree src = CALL_EXPR_ARG (exp, 1);
2991 tree len = CALL_EXPR_ARG (exp, 2);
2992 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2996 /* Expand an instrumented call EXP to the memcpy builtin.
2997 Return NULL_RTX if we failed; the caller should emit a normal call,
2998 otherwise try to get the result in TARGET, if convenient (and in
2999 mode MODE if that's convenient). */
3001 static rtx
3002 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3004 if (!validate_arglist (exp,
3005 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3006 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3007 INTEGER_TYPE, VOID_TYPE))
3008 return NULL_RTX;
3009 else
3011 tree dest = CALL_EXPR_ARG (exp, 0);
3012 tree src = CALL_EXPR_ARG (exp, 2);
3013 tree len = CALL_EXPR_ARG (exp, 4);
3014 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3016 /* Return src bounds with the result. */
3017 if (res)
3019 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3020 expand_normal (CALL_EXPR_ARG (exp, 1)));
3021 res = chkp_join_splitted_slot (res, bnd);
3023 return res;
3027 /* Expand a call EXP to the mempcpy builtin.
3028 Return NULL_RTX if we failed; the caller should emit a normal call,
3029 otherwise try to get the result in TARGET, if convenient (and in
3030 mode MODE if that's convenient). If ENDP is 0 return the
3031 destination pointer, if ENDP is 1 return the end pointer ala
3032 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3033 stpcpy. */
3035 static rtx
3036 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3038 if (!validate_arglist (exp,
3039 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3040 return NULL_RTX;
3041 else
3043 tree dest = CALL_EXPR_ARG (exp, 0);
3044 tree src = CALL_EXPR_ARG (exp, 1);
3045 tree len = CALL_EXPR_ARG (exp, 2);
3046 return expand_builtin_mempcpy_args (dest, src, len,
3047 target, mode, /*endp=*/ 1,
3048 exp);
3052 /* Expand an instrumented call EXP to the mempcpy builtin.
3053 Return NULL_RTX if we failed; the caller should emit a normal call,
3054 otherwise try to get the result in TARGET, if convenient (and in
3055 mode MODE if that's convenient). */
3057 static rtx
3058 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3060 if (!validate_arglist (exp,
3061 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3062 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3063 INTEGER_TYPE, VOID_TYPE))
3064 return NULL_RTX;
3065 else
3067 tree dest = CALL_EXPR_ARG (exp, 0);
3068 tree src = CALL_EXPR_ARG (exp, 2);
3069 tree len = CALL_EXPR_ARG (exp, 4);
3070 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3071 mode, 1, exp);
3073 /* Return src bounds with the result. */
3074 if (res)
3076 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3077 expand_normal (CALL_EXPR_ARG (exp, 1)));
3078 res = chkp_join_splitted_slot (res, bnd);
3080 return res;
3084 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3085 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3086 so that this can also be called without constructing an actual CALL_EXPR.
3087 The other arguments and return value are the same as for
3088 expand_builtin_mempcpy. */
3090 static rtx
3091 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3092 rtx target, machine_mode mode, int endp,
3093 tree orig_exp)
3095 tree fndecl = get_callee_fndecl (orig_exp);
3097 /* If return value is ignored, transform mempcpy into memcpy. */
3098 if (target == const0_rtx
3099 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3100 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3102 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3103 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3104 dest, src, len);
3105 return expand_expr (result, target, mode, EXPAND_NORMAL);
3107 else if (target == const0_rtx
3108 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3110 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3111 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3112 dest, src, len);
3113 return expand_expr (result, target, mode, EXPAND_NORMAL);
3115 else
3117 const char *src_str;
3118 unsigned int src_align = get_pointer_alignment (src);
3119 unsigned int dest_align = get_pointer_alignment (dest);
3120 rtx dest_mem, src_mem, len_rtx;
3122 /* If either SRC or DEST is not a pointer type, don't do this
3123 operation in-line. */
3124 if (dest_align == 0 || src_align == 0)
3125 return NULL_RTX;
3127 /* If LEN is not constant, call the normal function. */
3128 if (! tree_fits_uhwi_p (len))
3129 return NULL_RTX;
3131 len_rtx = expand_normal (len);
3132 src_str = c_getstr (src);
3134 /* If SRC is a string constant and block move would be done
3135 by pieces, we can avoid loading the string from memory
3136 and only store the computed constants. */
3137 if (src_str
3138 && CONST_INT_P (len_rtx)
3139 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3140 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3141 CONST_CAST (char *, src_str),
3142 dest_align, false))
3144 dest_mem = get_memory_rtx (dest, len);
3145 set_mem_align (dest_mem, dest_align);
3146 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3147 builtin_memcpy_read_str,
3148 CONST_CAST (char *, src_str),
3149 dest_align, false, endp);
3150 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3151 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3152 return dest_mem;
3155 if (CONST_INT_P (len_rtx)
3156 && can_move_by_pieces (INTVAL (len_rtx),
3157 MIN (dest_align, src_align)))
3159 dest_mem = get_memory_rtx (dest, len);
3160 set_mem_align (dest_mem, dest_align);
3161 src_mem = get_memory_rtx (src, len);
3162 set_mem_align (src_mem, src_align);
3163 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3164 MIN (dest_align, src_align), endp);
3165 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3166 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3167 return dest_mem;
3170 return NULL_RTX;
3174 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3175 we failed; the caller should emit a normal call, otherwise try to
3176 get the result in TARGET, if convenient. If ENDP is 0 return the
3177 destination pointer, if ENDP is 1 return the end pointer ala
3178 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3179 stpcpy. */
3181 static rtx
3182 expand_movstr (tree dest, tree src, rtx target, int endp)
3184 struct expand_operand ops[3];
3185 rtx dest_mem;
3186 rtx src_mem;
3188 if (!targetm.have_movstr ())
3189 return NULL_RTX;
3191 dest_mem = get_memory_rtx (dest, NULL);
3192 src_mem = get_memory_rtx (src, NULL);
3193 if (!endp)
3195 target = force_reg (Pmode, XEXP (dest_mem, 0));
3196 dest_mem = replace_equiv_address (dest_mem, target);
3199 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3200 create_fixed_operand (&ops[1], dest_mem);
3201 create_fixed_operand (&ops[2], src_mem);
3202 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3203 return NULL_RTX;
3205 if (endp && target != const0_rtx)
3207 target = ops[0].value;
3208 /* movstr is supposed to set end to the address of the NUL
3209 terminator. If the caller requested a mempcpy-like return value,
3210 adjust it. */
3211 if (endp == 1)
3213 rtx tem = plus_constant (GET_MODE (target),
3214 gen_lowpart (GET_MODE (target), target), 1);
3215 emit_move_insn (target, force_operand (tem, NULL_RTX));
3218 return target;
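/* Illustration (not part of the original source): movstr leaves the
   output operand pointing at the copied NUL terminator.  For the
   mempcpy-style ENDP == 1 the code above therefore adds 1; e.g. a
   result of &dst[3] (the NUL of "abc") becomes &dst[4].  */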
3221 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3222 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3223 try to get the result in TARGET, if convenient (and in mode MODE if that's
3224 convenient). */
3226 static rtx
3227 expand_builtin_strcpy (tree exp, rtx target)
3229 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3231 tree dest = CALL_EXPR_ARG (exp, 0);
3232 tree src = CALL_EXPR_ARG (exp, 1);
3233 return expand_builtin_strcpy_args (dest, src, target);
3235 return NULL_RTX;
3238 /* Helper function to do the actual work for expand_builtin_strcpy. The
3239 arguments to the builtin_strcpy call DEST and SRC are broken out
3240 so that this can also be called without constructing an actual CALL_EXPR.
3241 The other arguments and return value are the same as for
3242 expand_builtin_strcpy. */
3244 static rtx
3245 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3247 return expand_movstr (dest, src, target, /*endp=*/0);
3250 /* Expand a call EXP to the stpcpy builtin.
3251 Return NULL_RTX if we failed; the caller should emit a normal call,
3252 otherwise try to get the result in TARGET, if convenient (and in
3253 mode MODE if that's convenient). */
3255 static rtx
3256 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3258 tree dst, src;
3259 location_t loc = EXPR_LOCATION (exp);
3261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3262 return NULL_RTX;
3264 dst = CALL_EXPR_ARG (exp, 0);
3265 src = CALL_EXPR_ARG (exp, 1);
3267 /* If return value is ignored, transform stpcpy into strcpy. */
3268 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3270 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3271 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3272 return expand_expr (result, target, mode, EXPAND_NORMAL);
3274 else
3276 tree len, lenp1;
3277 rtx ret;
3279 /* Ensure we get an actual string whose length can be evaluated at
3280 compile-time, not an expression containing a string. This is
3281 because the latter will potentially produce pessimized code
3282 when used to produce the return value. */
3283 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3284 return expand_movstr (dst, src, target, /*endp=*/2);
3286 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3287 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3288 target, mode, /*endp=*/2,
3289 exp);
3291 if (ret)
3292 return ret;
3294 if (TREE_CODE (len) == INTEGER_CST)
3296 rtx len_rtx = expand_normal (len);
3298 if (CONST_INT_P (len_rtx))
3300 ret = expand_builtin_strcpy_args (dst, src, target);
3302 if (ret)
3304 if (! target)
3306 if (mode != VOIDmode)
3307 target = gen_reg_rtx (mode);
3308 else
3309 target = gen_reg_rtx (GET_MODE (ret));
3311 if (GET_MODE (target) != GET_MODE (ret))
3312 ret = gen_lowpart (GET_MODE (target), ret);
3314 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3315 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3316 gcc_assert (ret);
3318 return target;
3323 return expand_movstr (dst, src, target, /*endp=*/2);
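/* Illustration (not part of the original source): for a constant
   source, stpcpy (d, "abc") is handled above as

     mempcpy (d, "abc", 4)   with ENDP == 2

   which copies the NUL as well and returns d + 3, i.e. a pointer to
   the terminator, matching stpcpy's contract.  */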
3327 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3328 bytes from constant string DATA + OFFSET and return it as target
3329 constant. */
3331 static rtx
3332 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3333 machine_mode mode)
3335 const char *str = (const char *) data;
3337 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3338 return const0_rtx;
3340 return c_readstr (str + offset, mode);
3343 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3344 NULL_RTX if we failed; the caller should emit a normal call. */
3346 static rtx
3347 expand_builtin_strncpy (tree exp, rtx target)
3349 location_t loc = EXPR_LOCATION (exp);
3351 if (validate_arglist (exp,
3352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3354 tree dest = CALL_EXPR_ARG (exp, 0);
3355 tree src = CALL_EXPR_ARG (exp, 1);
3356 tree len = CALL_EXPR_ARG (exp, 2);
3357 tree slen = c_strlen (src, 1);
3359 /* We must be passed a constant len and src parameter. */
3360 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3361 return NULL_RTX;
3363 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3365 /* We're required to pad with trailing zeros if the requested
3366 len is greater than strlen(s2)+1. In that case try to
3367 use store_by_pieces; if it fails, punt. */
3368 if (tree_int_cst_lt (slen, len))
3370 unsigned int dest_align = get_pointer_alignment (dest);
3371 const char *p = c_getstr (src);
3372 rtx dest_mem;
3374 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3375 || !can_store_by_pieces (tree_to_uhwi (len),
3376 builtin_strncpy_read_str,
3377 CONST_CAST (char *, p),
3378 dest_align, false))
3379 return NULL_RTX;
3381 dest_mem = get_memory_rtx (dest, len);
3382 store_by_pieces (dest_mem, tree_to_uhwi (len),
3383 builtin_strncpy_read_str,
3384 CONST_CAST (char *, p), dest_align, false, 0);
3385 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3386 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3387 return dest_mem;
3390 return NULL_RTX;
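/* Illustration (not part of the original source): strncpy (d, "ab", 5)
   must store 'a', 'b' and three NULs.  Since builtin_strncpy_read_str
   supplies zero bytes for offsets beyond the end of the source string,
   the single store_by_pieces call above writes both the copy and the
   required zero padding.  */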
3393 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3394 bytes from constant string DATA + OFFSET and return it as target
3395 constant. */
3397 static rtx
3398 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3399 machine_mode mode)
3401 const char *c = (const char *) data;
3402 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3404 memset (p, *c, GET_MODE_SIZE (mode));
3406 return c_readstr (p, mode);
3409 /* Callback routine for store_by_pieces. Return the RTL of a register
3410 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3411 char value given in the RTL register data. For example, if mode is
3412 4 bytes wide, return the RTL for 0x01010101*data. */
3414 static rtx
3415 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3416 machine_mode mode)
3418 rtx target, coeff;
3419 size_t size;
3420 char *p;
3422 size = GET_MODE_SIZE (mode);
3423 if (size == 1)
3424 return (rtx) data;
3426 p = XALLOCAVEC (char, size);
3427 memset (p, 1, size);
3428 coeff = c_readstr (p, mode);
3430 target = convert_to_mode (mode, (rtx) data, 1);
3431 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3432 return force_reg (mode, target);
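/* Worked example (not part of the original source): for a 4-byte MODE
   and a register holding the value 0xab, the code above computes

     coeff  = 0x01010101
     target = 0xab * 0x01010101 = 0xabababab

   replicating the byte across the word, as the comment above
   describes.  */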
3435 /* Expand expression EXP, which is a call to the memset builtin. Return
3436 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3437 try to get the result in TARGET, if convenient (and in mode MODE if that's
3438 convenient). */
3440 static rtx
3441 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3443 if (!validate_arglist (exp,
3444 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3445 return NULL_RTX;
3446 else
3448 tree dest = CALL_EXPR_ARG (exp, 0);
3449 tree val = CALL_EXPR_ARG (exp, 1);
3450 tree len = CALL_EXPR_ARG (exp, 2);
3451 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3455 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3456 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3457 try to get the result in TARGET, if convenient (and in mode MODE if that's
3458 convenient). */
3460 static rtx
3461 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3463 if (!validate_arglist (exp,
3464 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3465 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3466 return NULL_RTX;
3467 else
3469 tree dest = CALL_EXPR_ARG (exp, 0);
3470 tree val = CALL_EXPR_ARG (exp, 2);
3471 tree len = CALL_EXPR_ARG (exp, 3);
3472 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3474 /* Return src bounds with the result. */
3475 if (res)
3477 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3478 expand_normal (CALL_EXPR_ARG (exp, 1)));
3479 res = chkp_join_splitted_slot (res, bnd);
3481 return res;
3485 /* Helper function to do the actual work for expand_builtin_memset. The
3486 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3487 so that this can also be called without constructing an actual CALL_EXPR.
3488 The other arguments and return value are the same as for
3489 expand_builtin_memset. */
3491 static rtx
3492 expand_builtin_memset_args (tree dest, tree val, tree len,
3493 rtx target, machine_mode mode, tree orig_exp)
3495 tree fndecl, fn;
3496 enum built_in_function fcode;
3497 machine_mode val_mode;
3498 char c;
3499 unsigned int dest_align;
3500 rtx dest_mem, dest_addr, len_rtx;
3501 HOST_WIDE_INT expected_size = -1;
3502 unsigned int expected_align = 0;
3503 unsigned HOST_WIDE_INT min_size;
3504 unsigned HOST_WIDE_INT max_size;
3505 unsigned HOST_WIDE_INT probable_max_size;
3507 dest_align = get_pointer_alignment (dest);
3509 /* If DEST is not a pointer type, don't do this operation in-line. */
3510 if (dest_align == 0)
3511 return NULL_RTX;
3513 if (currently_expanding_gimple_stmt)
3514 stringop_block_profile (currently_expanding_gimple_stmt,
3515 &expected_align, &expected_size);
3517 if (expected_align < dest_align)
3518 expected_align = dest_align;
3520 /* If the LEN parameter is zero, return DEST. */
3521 if (integer_zerop (len))
3523 /* Evaluate and ignore VAL in case it has side-effects. */
3524 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3525 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3528 /* Stabilize the arguments in case we fail. */
3529 dest = builtin_save_expr (dest);
3530 val = builtin_save_expr (val);
3531 len = builtin_save_expr (len);
3533 len_rtx = expand_normal (len);
3534 determine_block_size (len, len_rtx, &min_size, &max_size,
3535 &probable_max_size);
3536 dest_mem = get_memory_rtx (dest, len);
3537 val_mode = TYPE_MODE (unsigned_char_type_node);
3539 if (TREE_CODE (val) != INTEGER_CST)
3541 rtx val_rtx;
3543 val_rtx = expand_normal (val);
3544 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3546 /* Assume that we can memset by pieces if we can store
3547 the coefficients by pieces (in the required modes).
3548 We can't pass builtin_memset_gen_str as that emits RTL. */
3549 c = 1;
3550 if (tree_fits_uhwi_p (len)
3551 && can_store_by_pieces (tree_to_uhwi (len),
3552 builtin_memset_read_str, &c, dest_align,
3553 true))
3555 val_rtx = force_reg (val_mode, val_rtx);
3556 store_by_pieces (dest_mem, tree_to_uhwi (len),
3557 builtin_memset_gen_str, val_rtx, dest_align,
3558 true, 0);
3560 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3561 dest_align, expected_align,
3562 expected_size, min_size, max_size,
3563 probable_max_size))
3564 goto do_libcall;
3566 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3567 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3568 return dest_mem;
3571 if (target_char_cast (val, &c))
3572 goto do_libcall;
3574 if (c)
3576 if (tree_fits_uhwi_p (len)
3577 && can_store_by_pieces (tree_to_uhwi (len),
3578 builtin_memset_read_str, &c, dest_align,
3579 true))
3580 store_by_pieces (dest_mem, tree_to_uhwi (len),
3581 builtin_memset_read_str, &c, dest_align, true, 0);
3582 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3583 gen_int_mode (c, val_mode),
3584 dest_align, expected_align,
3585 expected_size, min_size, max_size,
3586 probable_max_size))
3587 goto do_libcall;
3589 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3590 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3591 return dest_mem;
3594 set_mem_align (dest_mem, dest_align);
3595 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3596 CALL_EXPR_TAILCALL (orig_exp)
3597 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3598 expected_align, expected_size,
3599 min_size, max_size,
3600 probable_max_size);
3602 if (dest_addr == 0)
3604 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3605 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3608 return dest_addr;
3610 do_libcall:
3611 fndecl = get_callee_fndecl (orig_exp);
3612 fcode = DECL_FUNCTION_CODE (fndecl);
3613 if (fcode == BUILT_IN_MEMSET
3614 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3615 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3616 dest, val, len);
3617 else if (fcode == BUILT_IN_BZERO)
3618 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3619 dest, len);
3620 else
3621 gcc_unreachable ();
3622 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3623 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3624 return expand_call (fn, target, target == const0_rtx);
3627 /* Expand expression EXP, which is a call to the bzero builtin. Return
3628 NULL_RTX if we failed; the caller should emit a normal call. */
3630 static rtx
3631 expand_builtin_bzero (tree exp)
3633 tree dest, size;
3634 location_t loc = EXPR_LOCATION (exp);
3636 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3637 return NULL_RTX;
3639 dest = CALL_EXPR_ARG (exp, 0);
3640 size = CALL_EXPR_ARG (exp, 1);
3642 /* New argument list transforming bzero(ptr x, int y) to
3643 memset(ptr x, int 0, size_t y). This is done this way
3644 so that if it isn't expanded inline, we fall back to
3645 calling bzero instead of memset. */
3647 return expand_builtin_memset_args (dest, integer_zero_node,
3648 fold_convert_loc (loc,
3649 size_type_node, size),
3650 const0_rtx, VOIDmode, exp);
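
/* Illustrative sketch (not part of GCC): the source-level equivalence the
   argument rewrite above relies on.  When expanded inline, bzero behaves as
   if it had been written as memset with a zero fill value.  */
static void
bzero_as_memset (void *p, __SIZE_TYPE__ n)
{
  __builtin_memset (p, 0, n);	/* equivalent to bzero (p, n) */
}
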
3653 /* Try to expand cmpstr operation ICODE with the given operands.
3654 Return the result rtx on success, otherwise return null. */
3656 static rtx
3657 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3658 HOST_WIDE_INT align)
3660 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3662 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3663 target = NULL_RTX;
3665 struct expand_operand ops[4];
3666 create_output_operand (&ops[0], target, insn_mode);
3667 create_fixed_operand (&ops[1], arg1_rtx);
3668 create_fixed_operand (&ops[2], arg2_rtx);
3669 create_integer_operand (&ops[3], align);
3670 if (maybe_expand_insn (icode, 4, ops))
3671 return ops[0].value;
3672 return NULL_RTX;
3675 /* Expand expression EXP, which is a call to the memcmp built-in function.
3676 Return NULL_RTX if we failed and the caller should emit a normal call,
3677 otherwise try to get the result in TARGET, if convenient.
3678 RESULT_EQ is true if we can relax the returned value to be either zero
3679 or nonzero, without caring about the sign. */
3681 static rtx
3682 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3684 if (!validate_arglist (exp,
3685 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3686 return NULL_RTX;
3688 tree arg1 = CALL_EXPR_ARG (exp, 0);
3689 tree arg2 = CALL_EXPR_ARG (exp, 1);
3690 tree len = CALL_EXPR_ARG (exp, 2);
3691 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3692 location_t loc = EXPR_LOCATION (exp);
3694 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3695 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3697 /* If we don't have POINTER_TYPE, call the function. */
3698 if (arg1_align == 0 || arg2_align == 0)
3699 return NULL_RTX;
3701 rtx arg1_rtx = get_memory_rtx (arg1, len);
3702 rtx arg2_rtx = get_memory_rtx (arg2, len);
3703 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3705 /* Set MEM_SIZE as appropriate. */
3706 if (CONST_INT_P (len_rtx))
3708 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3709 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3712 by_pieces_constfn constfn = NULL;
3714 const char *src_str = c_getstr (arg1);
3715 if (src_str == NULL)
3716 src_str = c_getstr (arg2);
3717 else
3718 std::swap (arg1_rtx, arg2_rtx);
3720 /* If SRC is a string constant and block move would be done
3721 by pieces, we can avoid loading the string from memory
3722 and only store the computed constants. */
3723 if (src_str
3724 && CONST_INT_P (len_rtx)
3725 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3726 constfn = builtin_memcpy_read_str;
3728 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3729 TREE_TYPE (len), target,
3730 result_eq, constfn,
3731 CONST_CAST (char *, src_str));
3733 if (result)
3735 /* Return the value in the proper mode for this function. */
3736 if (GET_MODE (result) == mode)
3737 return result;
3739 if (target != 0)
3741 convert_move (target, result, 0);
3742 return target;
3745 return convert_to_mode (mode, result, 0);
3748 return NULL_RTX;
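
/* Illustrative sketch (not part of GCC): RESULT_EQ corresponds to uses where
   only the zero/nonzero outcome is observable, so the expander may return
   any nonzero value for unequal blocks instead of a signed ordering.  */
static int
blocks_equal (const void *a, const void *b, __SIZE_TYPE__ n)
{
  /* Only the comparison against zero matters here.  */
  return __builtin_memcmp (a, b, n) == 0;
}
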
3751 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3752 if we failed; the caller should emit a normal call, otherwise try to get
3753 the result in TARGET, if convenient. */
3755 static rtx
3756 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3758 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3759 return NULL_RTX;
3761 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3762 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3763 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3765 rtx arg1_rtx, arg2_rtx;
3766 tree fndecl, fn;
3767 tree arg1 = CALL_EXPR_ARG (exp, 0);
3768 tree arg2 = CALL_EXPR_ARG (exp, 1);
3769 rtx result = NULL_RTX;
3771 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3772 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3774 /* If we don't have POINTER_TYPE, call the function. */
3775 if (arg1_align == 0 || arg2_align == 0)
3776 return NULL_RTX;
3778 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3779 arg1 = builtin_save_expr (arg1);
3780 arg2 = builtin_save_expr (arg2);
3782 arg1_rtx = get_memory_rtx (arg1, NULL);
3783 arg2_rtx = get_memory_rtx (arg2, NULL);
3785 /* Try to call cmpstrsi. */
3786 if (cmpstr_icode != CODE_FOR_nothing)
3787 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3788 MIN (arg1_align, arg2_align));
3790 /* Try to determine at least one length and call cmpstrnsi. */
3791 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3793 tree len;
3794 rtx arg3_rtx;
3796 tree len1 = c_strlen (arg1, 1);
3797 tree len2 = c_strlen (arg2, 1);
3799 if (len1)
3800 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3801 if (len2)
3802 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3804 /* If we don't have a constant length for the first, use the length
3805 of the second, if we know it. We don't require a constant for
3806 this case; some cost analysis could be done if both are available
3807 but neither is constant. For now, assume they're equally cheap,
3808 unless one has side effects. If both strings have constant lengths,
3809 use the smaller. */
3811 if (!len1)
3812 len = len2;
3813 else if (!len2)
3814 len = len1;
3815 else if (TREE_SIDE_EFFECTS (len1))
3816 len = len2;
3817 else if (TREE_SIDE_EFFECTS (len2))
3818 len = len1;
3819 else if (TREE_CODE (len1) != INTEGER_CST)
3820 len = len2;
3821 else if (TREE_CODE (len2) != INTEGER_CST)
3822 len = len1;
3823 else if (tree_int_cst_lt (len1, len2))
3824 len = len1;
3825 else
3826 len = len2;
3828 /* If both arguments have side effects, we cannot optimize. */
3829 if (len && !TREE_SIDE_EFFECTS (len))
3831 arg3_rtx = expand_normal (len);
3832 result = expand_cmpstrn_or_cmpmem
3833 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3834 arg3_rtx, MIN (arg1_align, arg2_align));
3838 if (result)
3840 /* Return the value in the proper mode for this function. */
3841 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3842 if (GET_MODE (result) == mode)
3843 return result;
3844 if (target == 0)
3845 return convert_to_mode (mode, result, 0);
3846 convert_move (target, result, 0);
3847 return target;
3850 /* Expand the library call ourselves using a stabilized argument
3851 list to avoid re-evaluating the function's arguments twice. */
3852 fndecl = get_callee_fndecl (exp);
3853 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3854 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3855 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3856 return expand_call (fn, target, target == const0_rtx);
3858 return NULL_RTX;
3861 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3862 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
3863 the result in TARGET, if convenient. */
3865 static rtx
3866 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3867 ATTRIBUTE_UNUSED machine_mode mode)
3869 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3871 if (!validate_arglist (exp,
3872 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3873 return NULL_RTX;
3875 /* If c_strlen can determine an expression for one of the string
3876 lengths, and it doesn't have side effects, then emit cmpstrnsi
3877 using length MIN(strlen(string)+1, arg3). */
3878 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3879 if (cmpstrn_icode != CODE_FOR_nothing)
3881 tree len, len1, len2;
3882 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3883 rtx result;
3884 tree fndecl, fn;
3885 tree arg1 = CALL_EXPR_ARG (exp, 0);
3886 tree arg2 = CALL_EXPR_ARG (exp, 1);
3887 tree arg3 = CALL_EXPR_ARG (exp, 2);
3889 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3890 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3892 len1 = c_strlen (arg1, 1);
3893 len2 = c_strlen (arg2, 1);
3895 if (len1)
3896 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3897 if (len2)
3898 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3900 /* If we don't have a constant length for the first, use the length
3901 of the second, if we know it. We don't require a constant for
3902 this case; some cost analysis could be done if both are available
3903 but neither is constant. For now, assume they're equally cheap,
3904 unless one has side effects. If both strings have constant lengths,
3905 use the smaller. */
3907 if (!len1)
3908 len = len2;
3909 else if (!len2)
3910 len = len1;
3911 else if (TREE_SIDE_EFFECTS (len1))
3912 len = len2;
3913 else if (TREE_SIDE_EFFECTS (len2))
3914 len = len1;
3915 else if (TREE_CODE (len1) != INTEGER_CST)
3916 len = len2;
3917 else if (TREE_CODE (len2) != INTEGER_CST)
3918 len = len1;
3919 else if (tree_int_cst_lt (len1, len2))
3920 len = len1;
3921 else
3922 len = len2;
3924 /* If both arguments have side effects, we cannot optimize. */
3925 if (!len || TREE_SIDE_EFFECTS (len))
3926 return NULL_RTX;
3928 /* The actual new length parameter is MIN(len,arg3). */
3929 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3930 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3932 /* If we don't have POINTER_TYPE, call the function. */
3933 if (arg1_align == 0 || arg2_align == 0)
3934 return NULL_RTX;
3936 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3937 arg1 = builtin_save_expr (arg1);
3938 arg2 = builtin_save_expr (arg2);
3939 len = builtin_save_expr (len);
3941 arg1_rtx = get_memory_rtx (arg1, len);
3942 arg2_rtx = get_memory_rtx (arg2, len);
3943 arg3_rtx = expand_normal (len);
3944 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3945 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3946 MIN (arg1_align, arg2_align));
3947 if (result)
3949 /* Return the value in the proper mode for this function. */
3950 mode = TYPE_MODE (TREE_TYPE (exp));
3951 if (GET_MODE (result) == mode)
3952 return result;
3953 if (target == 0)
3954 return convert_to_mode (mode, result, 0);
3955 convert_move (target, result, 0);
3956 return target;
3959 /* Expand the library call ourselves using a stabilized argument
3960 list to avoid re-evaluating the function's arguments twice. */
3961 fndecl = get_callee_fndecl (exp);
3962 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3963 arg1, arg2, len);
3964 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3965 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3966 return expand_call (fn, target, target == const0_rtx);
3968 return NULL_RTX;
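
/* Illustrative sketch (not part of GCC): why MIN (strlen (s) + 1, n) is a
   safe length for cmpstrnsi.  Comparison cannot continue past the NUL of a
   string with known constant length, so for a call like the one below the
   expander may compare at most 6 bytes even if N is larger.  */
static int
strncmp_bounded_example (const char *s, __SIZE_TYPE__ n)
{
  /* c_strlen ("hello") == 5, so the effective length is MIN (6, n).  */
  return __builtin_strncmp ("hello", s, n);
}
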
3971 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3972 if that's convenient. */
3974 rtx
3975 expand_builtin_saveregs (void)
3977 rtx val;
3978 rtx_insn *seq;
3980 /* Don't do __builtin_saveregs more than once in a function.
3981 Save the result of the first call and reuse it. */
3982 if (saveregs_value != 0)
3983 return saveregs_value;
3985 /* When this function is called, it means that registers must be
3986 saved on entry to this function. So we migrate the call to the
3987 first insn of this function. */
3989 start_sequence ();
3991 /* Do whatever the machine needs done in this case. */
3992 val = targetm.calls.expand_builtin_saveregs ();
3994 seq = get_insns ();
3995 end_sequence ();
3997 saveregs_value = val;
3999 /* Put the insns after the NOTE that starts the function. If this
4000 is inside a start_sequence, make the outer-level insn chain current, so
4001 the code is placed at the start of the function. */
4002 push_topmost_sequence ();
4003 emit_insn_after (seq, entry_of_function ());
4004 pop_topmost_sequence ();
4006 return val;
4009 /* Expand a call to __builtin_next_arg. */
4011 static rtx
4012 expand_builtin_next_arg (void)
4014 /* Checking arguments is already done in fold_builtin_next_arg
4015 that must be called before this function. */
4016 return expand_binop (ptr_mode, add_optab,
4017 crtl->args.internal_arg_pointer,
4018 crtl->args.arg_offset_rtx,
4019 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4022 /* Make it easier for the backends by protecting the valist argument
4023 from multiple evaluations. */
4025 static tree
4026 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4028 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4030 /* The current way of determining the type of valist is completely
4031 bogus. We should have the information on the va builtin instead. */
4032 if (!vatype)
4033 vatype = targetm.fn_abi_va_list (cfun->decl);
4035 if (TREE_CODE (vatype) == ARRAY_TYPE)
4037 if (TREE_SIDE_EFFECTS (valist))
4038 valist = save_expr (valist);
4040 /* For this case, the backends will be expecting a pointer to
4041 vatype, but it's possible we've actually been given an array
4042 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4043 So fix it. */
4044 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4046 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4047 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4050 else
4052 tree pt = build_pointer_type (vatype);
4054 if (! needs_lvalue)
4056 if (! TREE_SIDE_EFFECTS (valist))
4057 return valist;
4059 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4060 TREE_SIDE_EFFECTS (valist) = 1;
4063 if (TREE_SIDE_EFFECTS (valist))
4064 valist = save_expr (valist);
4065 valist = fold_build2_loc (loc, MEM_REF,
4066 vatype, valist, build_int_cst (pt, 0));
4069 return valist;
4072 /* The "standard" definition of va_list is void*. */
4074 tree
4075 std_build_builtin_va_list (void)
4077 return ptr_type_node;
4080 /* The "standard" abi va_list is va_list_type_node. */
4082 tree
4083 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4085 return va_list_type_node;
4088 /* The "standard" type of va_list is va_list_type_node. */
4090 tree
4091 std_canonical_va_list_type (tree type)
4093 tree wtype, htype;
4095 if (INDIRECT_REF_P (type))
4096 type = TREE_TYPE (type);
4097 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4098 type = TREE_TYPE (type);
4099 wtype = va_list_type_node;
4100 htype = type;
4101 /* Treat structure va_list types. */
4102 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4103 htype = TREE_TYPE (htype);
4104 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4106 /* If va_list is an array type, the argument may have decayed
4107 to a pointer type, e.g. by being passed to another function.
4108 In that case, unwrap both types so that we can compare the
4109 underlying records. */
4110 if (TREE_CODE (htype) == ARRAY_TYPE
4111 || POINTER_TYPE_P (htype))
4113 wtype = TREE_TYPE (wtype);
4114 htype = TREE_TYPE (htype);
4117 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4118 return va_list_type_node;
4120 return NULL_TREE;
4123 /* The "standard" implementation of va_start: just assign `nextarg' to
4124 the variable. */
4126 void
4127 std_expand_builtin_va_start (tree valist, rtx nextarg)
4129 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4130 convert_move (va_r, nextarg, 0);
4132 /* We do not have any valid bounds for the pointer, so
4133 just store zero bounds for it. */
4134 if (chkp_function_instrumented_p (current_function_decl))
4135 chkp_expand_bounds_reset_for_mem (valist,
4136 make_tree (TREE_TYPE (valist),
4137 nextarg));
4140 /* Expand EXP, a call to __builtin_va_start. */
4142 static rtx
4143 expand_builtin_va_start (tree exp)
4145 rtx nextarg;
4146 tree valist;
4147 location_t loc = EXPR_LOCATION (exp);
4149 if (call_expr_nargs (exp) < 2)
4151 error_at (loc, "too few arguments to function %<va_start%>");
4152 return const0_rtx;
4155 if (fold_builtin_next_arg (exp, true))
4156 return const0_rtx;
4158 nextarg = expand_builtin_next_arg ();
4159 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4161 if (targetm.expand_builtin_va_start)
4162 targetm.expand_builtin_va_start (valist, nextarg);
4163 else
4164 std_expand_builtin_va_start (valist, nextarg);
4166 return const0_rtx;
4169 /* Expand EXP, a call to __builtin_va_end. */
4171 static rtx
4172 expand_builtin_va_end (tree exp)
4174 tree valist = CALL_EXPR_ARG (exp, 0);
4176 /* Evaluate for side effects, if needed. I hate macros that don't
4177 do that. */
4178 if (TREE_SIDE_EFFECTS (valist))
4179 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4181 return const0_rtx;
4184 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4185 builtin rather than just as an assignment in stdarg.h because of the
4186 nastiness of array-type va_list types. */
4188 static rtx
4189 expand_builtin_va_copy (tree exp)
4191 tree dst, src, t;
4192 location_t loc = EXPR_LOCATION (exp);
4194 dst = CALL_EXPR_ARG (exp, 0);
4195 src = CALL_EXPR_ARG (exp, 1);
4197 dst = stabilize_va_list_loc (loc, dst, 1);
4198 src = stabilize_va_list_loc (loc, src, 0);
4200 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4202 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4204 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4205 TREE_SIDE_EFFECTS (t) = 1;
4206 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4208 else
4210 rtx dstb, srcb, size;
4212 /* Evaluate to pointers. */
4213 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4214 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4215 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4216 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4218 dstb = convert_memory_address (Pmode, dstb);
4219 srcb = convert_memory_address (Pmode, srcb);
4221 /* "Dereference" to BLKmode memories. */
4222 dstb = gen_rtx_MEM (BLKmode, dstb);
4223 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4224 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4225 srcb = gen_rtx_MEM (BLKmode, srcb);
4226 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4227 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4229 /* Copy. */
4230 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4233 return const0_rtx;
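
/* Illustrative sketch (not part of GCC): the usage pattern the va_*
   expanders above implement.  The __builtin_va_* forms are used so the
   example carries no header dependency.  */
static int
sum_ints (int count, ...)
{
  __builtin_va_list ap, ap2;
  int i, total = 0;

  __builtin_va_start (ap, count);
  __builtin_va_copy (ap2, ap);	/* handled by expand_builtin_va_copy */
  for (i = 0; i < count; i++)
    total += __builtin_va_arg (ap2, int);
  __builtin_va_end (ap2);
  __builtin_va_end (ap);
  return total;
}
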
4236 /* Expand a call to one of the builtin functions __builtin_frame_address or
4237 __builtin_return_address. */
4239 static rtx
4240 expand_builtin_frame_address (tree fndecl, tree exp)
4242 /* The argument must be a nonnegative integer constant.
4243 It counts the number of frames to scan up the stack.
4244 The value is either the frame pointer value or the return
4245 address saved in that frame. */
4246 if (call_expr_nargs (exp) == 0)
4247 /* Warning about missing arg was already issued. */
4248 return const0_rtx;
4249 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4251 error ("invalid argument to %qD", fndecl);
4252 return const0_rtx;
4254 else
4256 /* Number of frames to scan up the stack. */
4257 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4259 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4261 /* Some ports cannot access arbitrary stack frames. */
4262 if (tem == NULL)
4264 warning (0, "unsupported argument to %qD", fndecl);
4265 return const0_rtx;
4268 if (count)
4270 /* Warn since no effort is made to ensure that any frame
4271 beyond the current one exists or can be safely reached. */
4272 warning (OPT_Wframe_address, "calling %qD with "
4273 "a nonzero argument is unsafe", fndecl);
4276 /* For __builtin_frame_address, return what we've got. */
4277 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4278 return tem;
4280 if (!REG_P (tem)
4281 && ! CONSTANT_P (tem))
4282 tem = copy_addr_to_reg (tem);
4283 return tem;
4287 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4288 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4289 is the same as for allocate_dynamic_stack_space. */
4291 static rtx
4292 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4294 rtx op0;
4295 rtx result;
4296 bool valid_arglist;
4297 unsigned int align;
4298 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4299 == BUILT_IN_ALLOCA_WITH_ALIGN);
4301 valid_arglist
4302 = (alloca_with_align
4303 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4304 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4306 if (!valid_arglist)
4307 return NULL_RTX;
4309 /* Compute the argument. */
4310 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4312 /* Compute the alignment. */
4313 align = (alloca_with_align
4314 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4315 : BIGGEST_ALIGNMENT);
4317 /* Allocate the desired space. */
4318 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4319 result = convert_memory_address (ptr_mode, result);
4321 return result;
4324 /* Expand a call to bswap builtin in EXP.
4325 Return NULL_RTX if a normal call should be emitted rather than expanding the
4326 function in-line. If convenient, the result should be placed in TARGET.
4327 SUBTARGET may be used as the target for computing one of EXP's operands. */
4329 static rtx
4330 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4331 rtx subtarget)
4333 tree arg;
4334 rtx op0;
4336 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4337 return NULL_RTX;
4339 arg = CALL_EXPR_ARG (exp, 0);
4340 op0 = expand_expr (arg,
4341 subtarget && GET_MODE (subtarget) == target_mode
4342 ? subtarget : NULL_RTX,
4343 target_mode, EXPAND_NORMAL);
4344 if (GET_MODE (op0) != target_mode)
4345 op0 = convert_to_mode (target_mode, op0, 1);
4347 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4349 gcc_assert (target);
4351 return convert_to_mode (target_mode, target, 1);
4354 /* Expand a call to a unary builtin in EXP.
4355 Return NULL_RTX if a normal call should be emitted rather than expanding the
4356 function in-line. If convenient, the result should be placed in TARGET.
4357 SUBTARGET may be used as the target for computing one of EXP's operands. */
4359 static rtx
4360 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4361 rtx subtarget, optab op_optab)
4363 rtx op0;
4365 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4366 return NULL_RTX;
4368 /* Compute the argument. */
4369 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4370 (subtarget
4371 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4372 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4373 VOIDmode, EXPAND_NORMAL);
4374 /* Compute op, into TARGET if possible.
4375 Set TARGET to wherever the result comes back. */
4376 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4377 op_optab, op0, target, op_optab != clrsb_optab);
4378 gcc_assert (target);
4380 return convert_to_mode (target_mode, target, 0);
4383 /* Expand a call to __builtin_expect. We just return our argument
4384 as the builtin_expect semantics should already have been handled by the
4385 tree branch prediction pass. */
4387 static rtx
4388 expand_builtin_expect (tree exp, rtx target)
4390 tree arg;
4392 if (call_expr_nargs (exp) < 2)
4393 return const0_rtx;
4394 arg = CALL_EXPR_ARG (exp, 0);
4396 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4397 /* When guessing was done, the hints should be already stripped away. */
4398 gcc_assert (!flag_guess_branch_prob
4399 || optimize == 0 || seen_error ());
4400 return target;
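
/* Illustrative sketch (not part of GCC): by expansion time the hint below
   has already been consumed by the tree branch prediction pass, which is why
   the expander above can simply return its first argument.  */
static int
expect_example (int x)
{
  if (__builtin_expect (x == 0, 0))	/* x == 0 predicted unlikely */
    return -1;
  return x;
}
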
4403 /* Expand a call to __builtin_assume_aligned. We just return our first
4404 argument, as the builtin_assume_aligned semantics should already have
4405 been handled by CCP. */
4407 static rtx
4408 expand_builtin_assume_aligned (tree exp, rtx target)
4410 if (call_expr_nargs (exp) < 2)
4411 return const0_rtx;
4412 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4413 EXPAND_NORMAL);
4414 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4415 && (call_expr_nargs (exp) < 3
4416 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4417 return target;
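
/* Illustrative sketch (not part of GCC): the alignment promise below is
   consumed by CCP long before expansion, so the expander above only has to
   return the first argument.  */
static float
first_lane (float *p)
{
  float *ap = (float *) __builtin_assume_aligned (p, 16);
  return ap[0];	/* The compiler may now assume AP is 16-byte aligned.  */
}
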
4420 void
4421 expand_builtin_trap (void)
4423 if (targetm.have_trap ())
4425 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4426 /* For trap insns when not accumulating outgoing args force
4427 REG_ARGS_SIZE note to prevent crossjumping of calls with
4428 different args sizes. */
4429 if (!ACCUMULATE_OUTGOING_ARGS)
4430 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4432 else
4434 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4435 tree call_expr = build_call_expr (fn, 0);
4436 expand_call (call_expr, NULL_RTX, false);
4439 emit_barrier ();
4442 /* Expand a call to __builtin_unreachable. We do nothing except emit
4443 a barrier saying that control flow will not pass here.
4445 It is the responsibility of the program being compiled to ensure
4446 that control flow never reaches __builtin_unreachable. */
4447 static void
4448 expand_builtin_unreachable (void)
4450 emit_barrier ();
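
/* Illustrative sketch (not part of GCC): a typical use of the builtin.  The
   barrier emitted above lets the compiler drop the impossible fall-through
   path entirely.  */
static int
two_or_three (int x)
{
  switch (x)
    {
    case 2: return 20;
    case 3: return 30;
    default: __builtin_unreachable ();	/* caller guarantees x is 2 or 3 */
    }
}
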
4453 /* Expand EXP, a call to fabs, fabsf or fabsl.
4454 Return NULL_RTX if a normal call should be emitted rather than expanding
4455 the function inline. If convenient, the result should be placed
4456 in TARGET. SUBTARGET may be used as the target for computing
4457 the operand. */
4459 static rtx
4460 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4462 machine_mode mode;
4463 tree arg;
4464 rtx op0;
4466 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4467 return NULL_RTX;
4469 arg = CALL_EXPR_ARG (exp, 0);
4470 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4471 mode = TYPE_MODE (TREE_TYPE (arg));
4472 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4473 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4476 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4477 Return NULL_RTX if a normal call should be emitted rather than expanding the
4478 function inline. If convenient, the result should be placed in TARGET.
4479 SUBTARGET may be used as the target for computing the operand. */
4481 static rtx
4482 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4484 rtx op0, op1;
4485 tree arg;
4487 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4488 return NULL_RTX;
4490 arg = CALL_EXPR_ARG (exp, 0);
4491 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4493 arg = CALL_EXPR_ARG (exp, 1);
4494 op1 = expand_normal (arg);
4496 return expand_copysign (op0, op1, target);
4499 /* Expand a call to __builtin___clear_cache. */
4501 static rtx
4502 expand_builtin___clear_cache (tree exp)
4504 if (!targetm.code_for_clear_cache)
4506 #ifdef CLEAR_INSN_CACHE
4507 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4508 does something. Just do the default expansion to a call to
4509 __clear_cache(). */
4510 return NULL_RTX;
4511 #else
4512 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4513 does nothing. There is no need to call it. Do nothing. */
4514 return const0_rtx;
4515 #endif /* CLEAR_INSN_CACHE */
4518 /* We have a "clear_cache" insn, and it will handle everything. */
4519 tree begin, end;
4520 rtx begin_rtx, end_rtx;
4522 /* We must not expand to a library call. If we did, any
4523 fallback library function in libgcc that might contain a call to
4524 __builtin___clear_cache() would recurse infinitely. */
4525 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4527 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4528 return const0_rtx;
4531 if (targetm.have_clear_cache ())
4533 struct expand_operand ops[2];
4535 begin = CALL_EXPR_ARG (exp, 0);
4536 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4538 end = CALL_EXPR_ARG (exp, 1);
4539 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4541 create_address_operand (&ops[0], begin_rtx);
4542 create_address_operand (&ops[1], end_rtx);
4543 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4544 return const0_rtx;
4546 return const0_rtx;
4549 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4551 static rtx
4552 round_trampoline_addr (rtx tramp)
4554 rtx temp, addend, mask;
4556 /* If we don't need too much alignment, we'll have been guaranteed
4557 proper alignment by get_trampoline_type. */
4558 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4559 return tramp;
4561 /* Round address up to desired boundary. */
4562 temp = gen_reg_rtx (Pmode);
4563 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4564 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4566 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4567 temp, 0, OPTAB_LIB_WIDEN);
4568 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4569 temp, 0, OPTAB_LIB_WIDEN);
4571 return tramp;
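
/* Illustrative sketch (not part of GCC): the same round-up-to-alignment
   computation in integer arithmetic, for a power-of-two ALIGN in bytes.  The
   PLUS and AND emitted above implement exactly this.  */
static __UINTPTR_TYPE__
round_up_example (__UINTPTR_TYPE__ addr, __UINTPTR_TYPE__ align)
{
  return (addr + align - 1) & -align;
}
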
4574 static rtx
4575 expand_builtin_init_trampoline (tree exp, bool onstack)
4577 tree t_tramp, t_func, t_chain;
4578 rtx m_tramp, r_tramp, r_chain, tmp;
4580 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4581 POINTER_TYPE, VOID_TYPE))
4582 return NULL_RTX;
4584 t_tramp = CALL_EXPR_ARG (exp, 0);
4585 t_func = CALL_EXPR_ARG (exp, 1);
4586 t_chain = CALL_EXPR_ARG (exp, 2);
4588 r_tramp = expand_normal (t_tramp);
4589 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4590 MEM_NOTRAP_P (m_tramp) = 1;
4592 /* If ONSTACK, the TRAMP argument should be the address of a field
4593 within the local function's FRAME decl. Either way, let's see if
4594 we can fill in the MEM_ATTRs for this memory. */
4595 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4596 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4598 /* Creator of a heap trampoline is responsible for making sure the
4599 address is aligned to at least STACK_BOUNDARY. Normally malloc
4600 will ensure this anyhow. */
4601 tmp = round_trampoline_addr (r_tramp);
4602 if (tmp != r_tramp)
4604 m_tramp = change_address (m_tramp, BLKmode, tmp);
4605 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4606 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4609 /* The FUNC argument should be the address of the nested function.
4610 Extract the actual function decl to pass to the hook. */
4611 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4612 t_func = TREE_OPERAND (t_func, 0);
4613 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4615 r_chain = expand_normal (t_chain);
4617 /* Generate insns to initialize the trampoline. */
4618 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4620 if (onstack)
4622 trampolines_created = 1;
4624 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4625 "trampoline generated for nested function %qD", t_func);
4628 return const0_rtx;
4631 static rtx
4632 expand_builtin_adjust_trampoline (tree exp)
4634 rtx tramp;
4636 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4637 return NULL_RTX;
4639 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4640 tramp = round_trampoline_addr (tramp);
4641 if (targetm.calls.trampoline_adjust_address)
4642 tramp = targetm.calls.trampoline_adjust_address (tramp);
4644 return tramp;
4647 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4648 function. The function first checks whether the back end provides
4649 an insn to implement signbit for the respective mode. If not, it
4650 checks whether the floating point format of the value is such that
4651 the sign bit can be extracted. If that is not the case, error out.
4652 EXP is the expression that is a call to the builtin function; if
4653 convenient, the result should be placed in TARGET. */
4654 static rtx
4655 expand_builtin_signbit (tree exp, rtx target)
4657 const struct real_format *fmt;
4658 machine_mode fmode, imode, rmode;
4659 tree arg;
4660 int word, bitpos;
4661 enum insn_code icode;
4662 rtx temp;
4663 location_t loc = EXPR_LOCATION (exp);
4665 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4666 return NULL_RTX;
4668 arg = CALL_EXPR_ARG (exp, 0);
4669 fmode = TYPE_MODE (TREE_TYPE (arg));
4670 rmode = TYPE_MODE (TREE_TYPE (exp));
4671 fmt = REAL_MODE_FORMAT (fmode);
4673 arg = builtin_save_expr (arg);
4675 /* Expand the argument yielding a RTX expression. */
4676 temp = expand_normal (arg);
4678 /* Check if the back end provides an insn that handles signbit for the
4679 argument's mode. */
4680 icode = optab_handler (signbit_optab, fmode);
4681 if (icode != CODE_FOR_nothing)
4683 rtx_insn *last = get_last_insn ();
4684 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4685 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4686 return target;
4687 delete_insns_since (last);
4690 /* For floating point formats without a sign bit, implement signbit
4691 as "ARG < 0.0". */
4692 bitpos = fmt->signbit_ro;
4693 if (bitpos < 0)
4695 /* But we can't do this if the format supports signed zero. */
4696 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4698 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4699 build_real (TREE_TYPE (arg), dconst0));
4700 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4703 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4705 imode = int_mode_for_mode (fmode);
4706 gcc_assert (imode != BLKmode);
4707 temp = gen_lowpart (imode, temp);
4709 else
4711 imode = word_mode;
4712 /* Handle targets with different FP word orders. */
4713 if (FLOAT_WORDS_BIG_ENDIAN)
4714 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4715 else
4716 word = bitpos / BITS_PER_WORD;
4717 temp = operand_subword_force (temp, word, fmode);
4718 bitpos = bitpos % BITS_PER_WORD;
4721 /* Force the intermediate word_mode (or narrower) result into a
4722 register. This avoids attempting to create paradoxical SUBREGs
4723 of floating point modes below. */
4724 temp = force_reg (imode, temp);
4726 /* If the bitpos is within the "result mode" lowpart, the operation
4727 can be implemented with a single bitwise AND. Otherwise, we need
4728 a right shift and an AND. */
4730 if (bitpos < GET_MODE_BITSIZE (rmode))
4732 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4734 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4735 temp = gen_lowpart (rmode, temp);
4736 temp = expand_binop (rmode, and_optab, temp,
4737 immed_wide_int_const (mask, rmode),
4738 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4740 else
4742 /* Perform a logical right shift to place the signbit in the least
4743 significant bit, then truncate the result to the desired mode
4744 and mask just this bit. */
4745 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4746 temp = gen_lowpart (rmode, temp);
4747 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4748 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4751 return temp;
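
/* Illustrative sketch (not part of GCC, and assuming a 64-bit IEEE binary64
   double whose byte order matches the integer type): the bit-extraction
   fallback above amounts to testing the format's sign bit in an integer
   view of the value.  */
static int
signbit_example (double x)
{
  unsigned long long bits;
  __builtin_memcpy (&bits, &x, sizeof bits);	/* reinterpret, no UB */
  return (int) (bits >> 63);	/* signbit_ro == 63 for IEEE binary64 */
}
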
4754 /* Expand fork or exec calls. TARGET is the desired target of the
4755 call. EXP is the call. FN is the
4756 identifier of the actual function. IGNORE is nonzero if the
4757 value is to be ignored. */
4759 static rtx
4760 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4762 tree id, decl;
4763 tree call;
4765 /* If we are not profiling, just call the function. */
4766 if (!profile_arc_flag)
4767 return NULL_RTX;
4769 /* Otherwise call the wrapper. This should be equivalent for the rest of
4770 compiler, so the code does not diverge, and the wrapper may run the
4771 code necessary for keeping the profiling sane. */
4773 switch (DECL_FUNCTION_CODE (fn))
4775 case BUILT_IN_FORK:
4776 id = get_identifier ("__gcov_fork");
4777 break;
4779 case BUILT_IN_EXECL:
4780 id = get_identifier ("__gcov_execl");
4781 break;
4783 case BUILT_IN_EXECV:
4784 id = get_identifier ("__gcov_execv");
4785 break;
4787 case BUILT_IN_EXECLP:
4788 id = get_identifier ("__gcov_execlp");
4789 break;
4791 case BUILT_IN_EXECLE:
4792 id = get_identifier ("__gcov_execle");
4793 break;
4795 case BUILT_IN_EXECVP:
4796 id = get_identifier ("__gcov_execvp");
4797 break;
4799 case BUILT_IN_EXECVE:
4800 id = get_identifier ("__gcov_execve");
4801 break;
4803 default:
4804 gcc_unreachable ();
4807 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4808 FUNCTION_DECL, id, TREE_TYPE (fn));
4809 DECL_EXTERNAL (decl) = 1;
4810 TREE_PUBLIC (decl) = 1;
4811 DECL_ARTIFICIAL (decl) = 1;
4812 TREE_NOTHROW (decl) = 1;
4813 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4814 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4815 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4816 return expand_call (call, target, ignore);
4821 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4822 the pointer in these functions is void*, the tree optimizers may remove
4823 casts. The mode computed in expand_builtin isn't reliable either, due
4824 to __sync_bool_compare_and_swap.
4826 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4827 group of builtins. This gives us log2 of the mode size. */
4829 static inline machine_mode
4830 get_builtin_sync_mode (int fcode_diff)
4832 /* The size is not negotiable, so ask not to get BLKmode in return
4833 if the target indicates that a smaller size would be better. */
4834 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
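
/* Illustrative sketch (not part of GCC): FCODE_DIFF is log2 of the access
   size because the _1/_2/_4/_8/_16 builtins are numbered consecutively, so
   the size in bits is BITS_PER_UNIT shifted left by it.  */
static unsigned int
sync_size_bits_example (int fcode_diff)
{
  return 8u << fcode_diff;	/* 8 == BITS_PER_UNIT on typical targets */
}
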
4837 /* Expand the memory expression LOC and return the appropriate memory operand
4838 for the builtin_sync operations. */
4840 static rtx
4841 get_builtin_sync_mem (tree loc, machine_mode mode)
4843 rtx addr, mem;
4845 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4846 addr = convert_memory_address (Pmode, addr);
4848 /* Note that we explicitly do not want any alias information for this
4849 memory, so that we kill all other live memories. Otherwise we don't
4850 satisfy the full barrier semantics of the intrinsic. */
4851 mem = validize_mem (gen_rtx_MEM (mode, addr));
4853 /* The alignment needs to be at least that of the mode. */
4854 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4855 get_pointer_alignment (loc)));
4856 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4857 MEM_VOLATILE_P (mem) = 1;
4859 return mem;
4862 /* Make sure an argument is in the right mode.
4863 EXP is the tree argument.
4864 MODE is the mode it should be in. */
4866 static rtx
4867 expand_expr_force_mode (tree exp, machine_mode mode)
4869 rtx val;
4870 machine_mode old_mode;
4872 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4873 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4874 of CONST_INTs, where we know the old_mode only from the call argument. */
4876 old_mode = GET_MODE (val);
4877 if (old_mode == VOIDmode)
4878 old_mode = TYPE_MODE (TREE_TYPE (exp));
4879 val = convert_modes (mode, old_mode, val, 1);
4880 return val;
4884 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4885 EXP is the CALL_EXPR. CODE is the rtx code
4886 that corresponds to the arithmetic or logical operation from the name;
4887 an exception here is that NOT actually means NAND. TARGET is an optional
4888 place for us to store the results; AFTER is true if this is the
4889 fetch_and_xxx form. */
4891 static rtx
4892 expand_builtin_sync_operation (machine_mode mode, tree exp,
4893 enum rtx_code code, bool after,
4894 rtx target)
4896 rtx val, mem;
4897 location_t loc = EXPR_LOCATION (exp);
4899 if (code == NOT && warn_sync_nand)
4901 tree fndecl = get_callee_fndecl (exp);
4902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4904 static bool warned_f_a_n, warned_n_a_f;
4906 switch (fcode)
4908 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4909 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4910 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4911 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4912 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4913 if (warned_f_a_n)
4914 break;
4916 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4917 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4918 warned_f_a_n = true;
4919 break;
4921 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4922 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4923 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4924 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4925 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4926 if (warned_n_a_f)
4927 break;
4929 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4930 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4931 warned_n_a_f = true;
4932 break;
4934 default:
4935 gcc_unreachable ();
4939 /* Expand the operands. */
4940 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4941 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4943 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4944 after);
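
/* Illustrative sketch (not part of GCC): the AFTER parameter distinguishes
   the two builtin families routed through the expander above; both are full
   (seq_cst) barriers.  */
static int
sync_op_forms (int *counter)
{
  int before = __sync_fetch_and_add (counter, 1);	/* AFTER == false */
  int after = __sync_add_and_fetch (counter, 1);	/* AFTER == true */
  return after - before;
}
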
4947 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4948 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4949 true if this is the boolean form. TARGET is a place for us to store the
4950 results; this is NOT optional if IS_BOOL is true. */
4952 static rtx
4953 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4954 bool is_bool, rtx target)
4956 rtx old_val, new_val, mem;
4957 rtx *pbool, *poval;
4959 /* Expand the operands. */
4960 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4961 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4962 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4964 pbool = poval = NULL;
4965 if (target != const0_rtx)
4967 if (is_bool)
4968 pbool = &target;
4969 else
4970 poval = &target;
4972 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4973 false, MEMMODEL_SYNC_SEQ_CST,
4974 MEMMODEL_SYNC_SEQ_CST))
4975 return NULL_RTX;
4977 return target;
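
/* Illustrative sketch (not part of GCC): the two __sync CAS forms handled by
   the expander above, differing only in which result is kept.  */
static int
sync_cas_example (int *p, int oldv, int newv)
{
  /* Boolean form: nonzero iff the swap happened.  */
  if (__sync_bool_compare_and_swap (p, oldv, newv))
    return newv;
  /* Value form: returns the value that was in *P before the attempt.  */
  return __sync_val_compare_and_swap (p, oldv, newv);
}
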
4980 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4981 general form is actually an atomic exchange, and some targets only
4982 support a reduced form with the second argument being a constant 1.
4983 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4984 the results. */
4986 static rtx
4987 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4988 rtx target)
4990 rtx val, mem;
4992 /* Expand the operands. */
4993 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4994 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4996 return expand_sync_lock_test_and_set (target, mem, val);
4999 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5001 static void
5002 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5004 rtx mem;
5006 /* Expand the operands. */
5007 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5009 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
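
/* Illustrative sketch (not part of GCC): the classic spinlock pairing of the
   two builtins expanded above.  __sync_lock_test_and_set is an acquire
   barrier; __sync_lock_release stores 0 with release semantics.  */
static void
spin_lock (volatile int *lock)
{
  while (__sync_lock_test_and_set (lock, 1))
    ;	/* spin until the previous value was 0 */
}

static void
spin_unlock (volatile int *lock)
{
  __sync_lock_release (lock);
}
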
5012 /* Given an integer representing an ``enum memmodel'', verify its
5013 correctness and return the memory model enum. */
5015 static enum memmodel
5016 get_memmodel (tree exp)
5018 rtx op;
5019 unsigned HOST_WIDE_INT val;
5020 source_location loc
5021 = expansion_point_location_if_in_system_header (input_location);
5023 /* If the parameter is not a constant, it's a run time value so we'll just
5024 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5025 if (TREE_CODE (exp) != INTEGER_CST)
5026 return MEMMODEL_SEQ_CST;
5028 op = expand_normal (exp);
5030 val = INTVAL (op);
5031 if (targetm.memmodel_check)
5032 val = targetm.memmodel_check (val);
5033 else if (val & ~MEMMODEL_MASK)
5035 warning_at (loc, OPT_Winvalid_memory_model,
5036 "unknown architecture specifier in memory model to builtin");
5037 return MEMMODEL_SEQ_CST;
5040 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5041 if (memmodel_base (val) >= MEMMODEL_LAST)
5043 warning_at (loc, OPT_Winvalid_memory_model,
5044 "invalid memory model argument to builtin");
5045 return MEMMODEL_SEQ_CST;
5048 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5049 be conservative and promote consume to acquire. */
5050 if (val == MEMMODEL_CONSUME)
5051 val = MEMMODEL_ACQUIRE;
5053 return (enum memmodel) val;
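
/* Illustrative sketch (not part of GCC): the memmodel argument is normally
   one of the __ATOMIC_* constants, and only constant arguments are checked.
   A runtime value is conservatively treated as seq_cst, and __ATOMIC_CONSUME
   is promoted to acquire (Bugzilla 59448).  */
static int
load_acquire_example (int *p)
{
  return __atomic_load_n (p, __ATOMIC_ACQUIRE);
}
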
5056 /* Expand the __atomic_exchange intrinsic:
5057 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5058 EXP is the CALL_EXPR.
5059 TARGET is an optional place for us to store the results. */
5061 static rtx
5062 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5064 rtx val, mem;
5065 enum memmodel model;
5067 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5069 if (!flag_inline_atomics)
5070 return NULL_RTX;
5072 /* Expand the operands. */
5073 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5074 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5076 return expand_atomic_exchange (target, mem, val, model);
5079 /* Expand the __atomic_compare_exchange intrinsic:
5080 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5081 TYPE desired, BOOL weak,
5082 enum memmodel success,
5083 enum memmodel failure)
5084 EXP is the CALL_EXPR.
5085 TARGET is an optional place for us to store the results. */
5087 static rtx
5088 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5089 rtx target)
5091 rtx expect, desired, mem, oldval;
5092 rtx_code_label *label;
5093 enum memmodel success, failure;
5094 tree weak;
5095 bool is_weak;
5096 source_location loc
5097 = expansion_point_location_if_in_system_header (input_location);
5099 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5100 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5102 if (failure > success)
5104 warning_at (loc, OPT_Winvalid_memory_model,
5105 "failure memory model cannot be stronger than success "
5106 "memory model for %<__atomic_compare_exchange%>");
5107 success = MEMMODEL_SEQ_CST;
5110 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5112 warning_at (loc, OPT_Winvalid_memory_model,
5113 "invalid failure memory model for "
5114 "%<__atomic_compare_exchange%>");
5115 failure = MEMMODEL_SEQ_CST;
5116 success = MEMMODEL_SEQ_CST;
5120 if (!flag_inline_atomics)
5121 return NULL_RTX;
5123 /* Expand the operands. */
5124 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5126 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5127 expect = convert_memory_address (Pmode, expect);
5128 expect = gen_rtx_MEM (mode, expect);
5129 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5131 weak = CALL_EXPR_ARG (exp, 3);
5132 is_weak = false;
5133 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5134 is_weak = true;
5136 if (target == const0_rtx)
5137 target = NULL;
5139 /* Lest the rtl backend create a race condition with an improper store
5140 to memory, always create a new pseudo for OLDVAL. */
5141 oldval = NULL;
5143 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5144 is_weak, success, failure))
5145 return NULL_RTX;
5147 /* Conditionally store back to EXPECT, lest we create a race condition
5148 with an improper store to memory. */
5149 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5150 the normal case where EXPECT is totally private, i.e. a register. At
5151 which point the store can be unconditional. */
5152 label = gen_label_rtx ();
5153 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5154 GET_MODE (target), 1, label);
5155 emit_move_insn (expect, oldval);
5156 emit_label (label);
5158 return target;
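
/* Illustrative sketch (not part of GCC): the canonical CAS retry loop the
   expander above supports.  On failure, EXPECTED is refreshed from memory,
   matching the conditional store back to EXPECT emitted above.  */
static void
atomic_add_example (int *p, int delta)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (p, &expected, expected + delta,
				       /*weak=*/1, __ATOMIC_SEQ_CST,
				       __ATOMIC_RELAXED))
    ;	/* expected now holds the current value; retry */
}
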
5161 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5162 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5163 call. The weak parameter must be dropped to match the expected parameter
5164 list and the expected argument changed from value to pointer to memory
5165 slot. */
5167 static void
5168 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5170 unsigned int z;
5171 vec<tree, va_gc> *vec;
5173 vec_alloc (vec, 5);
5174 vec->quick_push (gimple_call_arg (call, 0));
5175 tree expected = gimple_call_arg (call, 1);
5176 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5177 TREE_TYPE (expected));
5178 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5179 if (expd != x)
5180 emit_move_insn (x, expd);
5181 tree v = make_tree (TREE_TYPE (expected), x);
5182 vec->quick_push (build1 (ADDR_EXPR,
5183 build_pointer_type (TREE_TYPE (expected)), v));
5184 vec->quick_push (gimple_call_arg (call, 2));
5185 /* Skip the boolean weak parameter. */
5186 for (z = 4; z < 6; z++)
5187 vec->quick_push (gimple_call_arg (call, z));
5188 built_in_function fncode
5189 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5190 + exact_log2 (GET_MODE_SIZE (mode)));
5191 tree fndecl = builtin_decl_explicit (fncode);
5192 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5193 fndecl);
5194 tree exp = build_call_vec (boolean_type_node, fn, vec);
5195 tree lhs = gimple_call_lhs (call);
5196 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5197 if (lhs)
5199 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5200 if (GET_MODE (boolret) != mode)
5201 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5202 x = force_reg (mode, x);
5203 write_complex_part (target, boolret, true);
5204 write_complex_part (target, x, false);
5208 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5210 void
5211 expand_ifn_atomic_compare_exchange (gcall *call)
5213 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5214 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5215 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5216 rtx expect, desired, mem, oldval, boolret;
5217 enum memmodel success, failure;
5218 tree lhs;
5219 bool is_weak;
5220 source_location loc
5221 = expansion_point_location_if_in_system_header (gimple_location (call));
5223 success = get_memmodel (gimple_call_arg (call, 4));
5224 failure = get_memmodel (gimple_call_arg (call, 5));
5226 if (failure > success)
5228 warning_at (loc, OPT_Winvalid_memory_model,
5229 "failure memory model cannot be stronger than success "
5230 "memory model for %<__atomic_compare_exchange%>");
5231 success = MEMMODEL_SEQ_CST;
5234 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5236 warning_at (loc, OPT_Winvalid_memory_model,
5237 "invalid failure memory model for "
5238 "%<__atomic_compare_exchange%>");
5239 failure = MEMMODEL_SEQ_CST;
5240 success = MEMMODEL_SEQ_CST;
5243 if (!flag_inline_atomics)
5245 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5246 return;
5249 /* Expand the operands. */
5250 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5252 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5253 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5255 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5257 boolret = NULL;
5258 oldval = NULL;
5260 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5261 is_weak, success, failure))
5263 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5264 return;
5267 lhs = gimple_call_lhs (call);
5268 if (lhs)
5270 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5271 if (GET_MODE (boolret) != mode)
5272 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5273 write_complex_part (target, boolret, true);
5274 write_complex_part (target, oldval, false);
5278 /* Expand the __atomic_load intrinsic:
5279 TYPE __atomic_load (TYPE *object, enum memmodel)
5280 EXP is the CALL_EXPR.
5281 TARGET is an optional place for us to store the results. */
5283 static rtx
5284 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5286 rtx mem;
5287 enum memmodel model;
5289 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5290 if (is_mm_release (model) || is_mm_acq_rel (model))
5292 source_location loc
5293 = expansion_point_location_if_in_system_header (input_location);
5294 warning_at (loc, OPT_Winvalid_memory_model,
5295 "invalid memory model for %<__atomic_load%>");
5296 model = MEMMODEL_SEQ_CST;
5299 if (!flag_inline_atomics)
5300 return NULL_RTX;
5302 /* Expand the operand. */
5303 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5305 return expand_atomic_load (target, mem, model);
5309 /* Expand the __atomic_store intrinsic:
5310 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5311 EXP is the CALL_EXPR.
5312 TARGET is an optional place for us to store the results. */
5314 static rtx
5315 expand_builtin_atomic_store (machine_mode mode, tree exp)
5317 rtx mem, val;
5318 enum memmodel model;
5320 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5321 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5322 || is_mm_release (model)))
5324 source_location loc
5325 = expansion_point_location_if_in_system_header (input_location);
5326 warning_at (loc, OPT_Winvalid_memory_model,
5327 "invalid memory model for %<__atomic_store%>");
5328 model = MEMMODEL_SEQ_CST;
5331 if (!flag_inline_atomics)
5332 return NULL_RTX;
5334 /* Expand the operands. */
5335 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5336 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5338 return expand_atomic_store (mem, val, model, false);
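/* Editor's illustrative sketch (not in the original source): stores
   accept only relaxed, release and seq_cst models, so

     int v;
     __atomic_store_n (&v, 1, __ATOMIC_RELEASE);   // accepted
     __atomic_store_n (&v, 1, __ATOMIC_ACQUIRE);   // warns

   and, as with loads, a bad model is demoted to __ATOMIC_SEQ_CST.  */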
5341 /* Expand the __atomic_fetch_XXX intrinsic:
5342 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results.
5345 CODE is the operation, PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
5346 FETCH_AFTER is true if returning the result of the operation.
5347 FETCH_AFTER is false if returning the value before the operation.
5348 IGNORE is true if the result is not used.
5349 EXT_CALL is the correct builtin for an external call if this cannot be
5350 resolved to an instruction sequence. */
5352 static rtx
5353 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5354 enum rtx_code code, bool fetch_after,
5355 bool ignore, enum built_in_function ext_call)
5357 rtx val, mem, ret;
5358 enum memmodel model;
5359 tree fndecl;
5360 tree addr;
5362 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5364 /* Expand the operands. */
5365 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5366 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5368 /* Only try generating instructions if inlining is turned on. */
5369 if (flag_inline_atomics)
5371 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5372 if (ret)
5373 return ret;
5376 /* Return if a different routine isn't needed for the library call. */
5377 if (ext_call == BUILT_IN_NONE)
5378 return NULL_RTX;
5380 /* Change the call to the specified function. */
5381 fndecl = get_callee_fndecl (exp);
5382 addr = CALL_EXPR_FN (exp);
5383 STRIP_NOPS (addr);
5385 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5386 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5388 /* Expand the call here so we can emit trailing code. */
5389 ret = expand_call (exp, target, ignore);
5391 /* Replace the original function just in case it matters. */
5392 TREE_OPERAND (addr, 0) = fndecl;
5394 /* Then issue the arithmetic correction to return the right result. */
5395 if (!ignore)
5397 if (code == NOT)
5399 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5400 OPTAB_LIB_WIDEN);
5401 ret = expand_simple_unop (mode, NOT, ret, target, true);
5403 else
5404 ret = expand_simple_binop (mode, code, ret, val, target, true,
5405 OPTAB_LIB_WIDEN);
5407 return ret;
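/* Editor's worked sketch (not in the original source) of the trailing
   correction above: the external __atomic_fetch_OP routines return the
   value *before* the operation, so an OP_fetch builtin that falls back
   to them recomputes the new value locally, e.g. for nand

     unsigned old = __atomic_fetch_nand (&word, val, model);
     unsigned after = ~(old & val);    // the AND-then-NOT pair above

   and for the plain operations simply after = old OP val.  */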
5410 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5412 void
5413 expand_ifn_atomic_bit_test_and (gcall *call)
5415 tree ptr = gimple_call_arg (call, 0);
5416 tree bit = gimple_call_arg (call, 1);
5417 tree flag = gimple_call_arg (call, 2);
5418 tree lhs = gimple_call_lhs (call);
5419 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5420 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5421 enum rtx_code code;
5422 optab optab;
5423 struct expand_operand ops[5];
5425 gcc_assert (flag_inline_atomics);
5427 if (gimple_call_num_args (call) == 4)
5428 model = get_memmodel (gimple_call_arg (call, 3));
5430 rtx mem = get_builtin_sync_mem (ptr, mode);
5431 rtx val = expand_expr_force_mode (bit, mode);
5433 switch (gimple_call_internal_fn (call))
5435 case IFN_ATOMIC_BIT_TEST_AND_SET:
5436 code = IOR;
5437 optab = atomic_bit_test_and_set_optab;
5438 break;
5439 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5440 code = XOR;
5441 optab = atomic_bit_test_and_complement_optab;
5442 break;
5443 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5444 code = AND;
5445 optab = atomic_bit_test_and_reset_optab;
5446 break;
5447 default:
5448 gcc_unreachable ();
5451 if (lhs == NULL_TREE)
5453 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5454 val, NULL_RTX, true, OPTAB_DIRECT);
5455 if (code == AND)
5456 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5457 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5458 return;
5461 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5462 enum insn_code icode = direct_optab_handler (optab, mode);
5463 gcc_assert (icode != CODE_FOR_nothing);
5464 create_output_operand (&ops[0], target, mode);
5465 create_fixed_operand (&ops[1], mem);
5466 create_convert_operand_to (&ops[2], val, mode, true);
5467 create_integer_operand (&ops[3], model);
5468 create_integer_operand (&ops[4], integer_onep (flag));
5469 if (maybe_expand_insn (icode, 5, ops))
5470 return;
5472 rtx bitval = val;
5473 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5474 val, NULL_RTX, true, OPTAB_DIRECT);
5475 rtx maskval = val;
5476 if (code == AND)
5477 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5478 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5479 code, model, false);
5480 if (integer_onep (flag))
5482 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5483 NULL_RTX, true, OPTAB_DIRECT);
5484 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5485 true, OPTAB_DIRECT);
5487 else
5488 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5489 OPTAB_DIRECT);
5490 if (result != target)
5491 emit_move_insn (target, result);
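/* Editor's illustrative sketch (not in the original source): this
   internal function is matched from fetch-ops whose result only tests
   a single bit, e.g.

     unsigned long mask = 1ul << bit;
     if (__atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST) & mask)
       ...

   and the fallback code above recreates exactly that shift /
   fetch-op / mask sequence when the target lacks an
   atomic_bit_test_and_* pattern.  */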
5494 /* Expand an atomic clear operation.
5495 void __atomic_clear (BOOL *obj, enum memmodel)
5496 EXP is the call expression. */
5498 static rtx
5499 expand_builtin_atomic_clear (tree exp)
5501 machine_mode mode;
5502 rtx mem, ret;
5503 enum memmodel model;
5505 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5506 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5507 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5509 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5511 source_location loc
5512 = expansion_point_location_if_in_system_header (input_location);
5513 warning_at (loc, OPT_Winvalid_memory_model,
5514 "invalid memory model for %<__atomic_clear%>");
5515 model = MEMMODEL_SEQ_CST;
5518 /* Try issuing an __atomic_store, allowing fallback to __sync_lock_release.
5519 Failing that, issue a plain store. The only way this can
5520 fail is if the bool type is larger than a word size. Unlikely, but
5521 handle it anyway for completeness. Assume a single threaded model since
5522 there is no atomic support in this case, and no barriers are required. */
5523 ret = expand_atomic_store (mem, const0_rtx, model, true);
5524 if (!ret)
5525 emit_move_insn (mem, const0_rtx);
5526 return const0_rtx;
5529 /* Expand an atomic test_and_set operation.
5530 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5531 EXP is the call expression. */
5533 static rtx
5534 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5536 rtx mem;
5537 enum memmodel model;
5538 machine_mode mode;
5540 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5541 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5542 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5544 return expand_atomic_test_and_set (target, mem, model);
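/* Editor's illustrative sketch (not in the original source): the usual
   client of the two expanders above is a byte-sized spinlock:

     static volatile _Bool lock;

     void acquire (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
	 ;   // spin
     }

     void release (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);   // release is valid here
     }
*/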
5548 /* Return true if an object of size ARG0, whose alignment is encoded by the
5549 optional argument ARG1, is always lock free on this architecture; if ARG1 is NULL, use typical alignment for size ARG0. */
5551 static tree
5552 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5554 int size;
5555 machine_mode mode;
5556 unsigned int mode_align, type_align;
5558 if (TREE_CODE (arg0) != INTEGER_CST)
5559 return NULL_TREE;
5561 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5562 mode = mode_for_size (size, MODE_INT, 0);
5563 mode_align = GET_MODE_ALIGNMENT (mode);
5565 if (TREE_CODE (arg1) == INTEGER_CST)
5567 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5569 /* Either this argument is null, or it's a fake pointer encoding
5570 the alignment of the object. */
5571 val = val & -val;
5572 val *= BITS_PER_UNIT;
5574 if (val == 0 || mode_align < val)
5575 type_align = mode_align;
5576 else
5577 type_align = val;
5579 else
5581 tree ttype = TREE_TYPE (arg1);
5583 /* This function is usually invoked and folded immediately by the front
5584 end before anything else has a chance to look at it. The pointer
5585 parameter at this point is usually cast to a void *, so check for that
5586 and look past the cast. */
5587 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
5588 && VOID_TYPE_P (TREE_TYPE (ttype)))
5589 arg1 = TREE_OPERAND (arg1, 0);
5591 ttype = TREE_TYPE (arg1);
5592 gcc_assert (POINTER_TYPE_P (ttype));
5594 /* Get the underlying type of the object. */
5595 ttype = TREE_TYPE (ttype);
5596 type_align = TYPE_ALIGN (ttype);
5599 /* If the object has smaller alignment, the lock free routines cannot
5600 be used. */
5601 if (type_align < mode_align)
5602 return boolean_false_node;
5604 /* Check if a compare_and_swap pattern exists for the mode which represents
5605 the required size. The pattern is not allowed to fail, so the existence
5606 of the pattern indicates support is present. */
5607 if (can_compare_and_swap_p (mode, true))
5608 return boolean_true_node;
5609 else
5610 return boolean_false_node;
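/* Editor's worked sketch (not in the original source) of the "fake
   pointer" decoding above: the front end may encode the object's
   alignment as a constant pointer value, and VAL & -VAL isolates its
   lowest set bit, i.e. the guaranteed alignment in bytes:

     __atomic_always_lock_free (4, 0);	       // typical alignment for 4
     __atomic_always_lock_free (4, (void *) 8);  // object 8-byte aligned

   e.g. val = 12 gives 12 & -12 = 4, so an address that is a multiple
   of 12 is only known to be 4-byte aligned.  */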
5613 /* Return true if the parameters to call EXP represent an object which will
5614 always generate lock free instructions. The first argument represents the
5615 size of the object, and the second parameter is a pointer to the object
5616 itself. If NULL is passed for the object, then the result is based on
5617 typical alignment for an object of the specified size. Returns const1_rtx
5618 if lock free, otherwise const0_rtx. */
5620 static rtx
5621 expand_builtin_atomic_always_lock_free (tree exp)
5623 tree size;
5624 tree arg0 = CALL_EXPR_ARG (exp, 0);
5625 tree arg1 = CALL_EXPR_ARG (exp, 1);
5627 if (TREE_CODE (arg0) != INTEGER_CST)
5629 error ("non-constant argument 1 to __atomic_always_lock_free");
5630 return const0_rtx;
5633 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5634 if (size == boolean_true_node)
5635 return const1_rtx;
5636 return const0_rtx;
5639 /* Return boolean_true_node if it can be determined that object ARG1 of size
5640 ARG0 is lock free on this architecture, otherwise NULL_TREE. */
5642 static tree
5643 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5645 if (!flag_inline_atomics)
5646 return NULL_TREE;
5648 /* If it isn't always lock free, don't generate a result. */
5649 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5650 return boolean_true_node;
5652 return NULL_TREE;
5655 /* Return true if the parameters to call EXP represent an object which will
5656 always generate lock free instructions. The first argument represents the
5657 size of the object, and the second parameter is a pointer to the object
5658 itself. If NULL is passed for the object, then the result is based on
5659 typical alignment for an object of the specified size. Otherwise return
5660 NULL. */
5662 static rtx
5663 expand_builtin_atomic_is_lock_free (tree exp)
5665 tree size;
5666 tree arg0 = CALL_EXPR_ARG (exp, 0);
5667 tree arg1 = CALL_EXPR_ARG (exp, 1);
5669 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5671 error ("non-integer argument 1 to __atomic_is_lock_free");
5672 return NULL_RTX;
5675 if (!flag_inline_atomics)
5676 return NULL_RTX;
5678 /* If the value is known at compile time, return the RTX for it. */
5679 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5680 if (size == boolean_true_node)
5681 return const1_rtx;
5683 return NULL_RTX;
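/* Editor's illustrative note (not in the original source): unlike
   __atomic_always_lock_free, this builtin may defer to the runtime;

     __atomic_is_lock_free (sizeof (long long), 0)

   folds to 1 only if the answer is known at compile time, and the
   NULL_RTX returns above otherwise leave a libatomic call in place.  */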
5686 /* Expand the __atomic_thread_fence intrinsic:
5687 void __atomic_thread_fence (enum memmodel)
5688 EXP is the CALL_EXPR. */
5690 static void
5691 expand_builtin_atomic_thread_fence (tree exp)
5693 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5694 expand_mem_thread_fence (model);
5697 /* Expand the __atomic_signal_fence intrinsic:
5698 void __atomic_signal_fence (enum memmodel)
5699 EXP is the CALL_EXPR. */
5701 static void
5702 expand_builtin_atomic_signal_fence (tree exp)
5704 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5705 expand_mem_signal_fence (model);
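/* Editor's illustrative sketch (not in the original source) of the two
   fences expanded above:

     __atomic_thread_fence (__ATOMIC_ACQ_REL);   // orders against other
						 // threads
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler barrier only;
						 // orders against a signal
						 // handler in this thread
*/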
5708 /* Expand the __sync_synchronize intrinsic. */
5710 static void
5711 expand_builtin_sync_synchronize (void)
5713 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5716 static rtx
5717 expand_builtin_thread_pointer (tree exp, rtx target)
5719 enum insn_code icode;
5720 if (!validate_arglist (exp, VOID_TYPE))
5721 return const0_rtx;
5722 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5723 if (icode != CODE_FOR_nothing)
5725 struct expand_operand op;
5726 /* If the target is not suitable then create a new target. */
5727 if (target == NULL_RTX
5728 || !REG_P (target)
5729 || GET_MODE (target) != Pmode)
5730 target = gen_reg_rtx (Pmode);
5731 create_output_operand (&op, target, Pmode);
5732 expand_insn (icode, 1, &op);
5733 return target;
5735 error ("__builtin_thread_pointer is not supported on this target");
5736 return const0_rtx;
5739 static void
5740 expand_builtin_set_thread_pointer (tree exp)
5742 enum insn_code icode;
5743 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5744 return;
5745 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5746 if (icode != CODE_FOR_nothing)
5748 struct expand_operand op;
5749 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5750 Pmode, EXPAND_NORMAL);
5751 create_input_operand (&op, val, Pmode);
5752 expand_insn (icode, 1, &op);
5753 return;
5755 error ("__builtin_set_thread_pointer is not supported on this target");
5759 /* Emit code to restore the current value of the stack. */
5761 static void
5762 expand_stack_restore (tree var)
5764 rtx_insn *prev;
5765 rtx sa = expand_normal (var);
5767 sa = convert_memory_address (Pmode, sa);
5769 prev = get_last_insn ();
5770 emit_stack_restore (SAVE_BLOCK, sa);
5772 record_new_stack_level ();
5774 fixup_args_size_notes (prev, get_last_insn (), 0);
5777 /* Emit code to save the current value of the stack. */
5779 static rtx
5780 expand_stack_save (void)
5782 rtx ret = NULL_RTX;
5784 emit_stack_save (SAVE_BLOCK, &ret);
5785 return ret;
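/* Editor's illustrative sketch (not in the original source): the pair
   above implements the bracketing of variable-sized allocations,
   conceptually

     void *sp = __builtin_stack_save ();
     {
       char *p = __builtin_alloca (n);	// or a VLA
       ...
     }
     __builtin_stack_restore (sp);	// reclaims the alloca'd space
*/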
5789 /* Expand an expression EXP that calls a built-in function,
5790 with result going to TARGET if that's convenient
5791 (and in mode MODE if that's convenient).
5792 SUBTARGET may be used as the target for computing one of EXP's operands.
5793 IGNORE is nonzero if the value is to be ignored. */
5795 rtx
5796 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5797 int ignore)
5799 tree fndecl = get_callee_fndecl (exp);
5800 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5801 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5802 int flags;
5804 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5805 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5807 /* When ASan is enabled, we don't want to expand some memory/string
5808 builtins and rely on libsanitizer's hooks. This allows us to avoid
5809 redundant checks and be sure that a possible overflow will be detected
5810 by ASan. */
5812 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5813 return expand_call (exp, target, ignore);
5815 /* When not optimizing, generate calls to library functions for a certain
5816 set of builtins. */
5817 if (!optimize
5818 && !called_as_built_in (fndecl)
5819 && fcode != BUILT_IN_FORK
5820 && fcode != BUILT_IN_EXECL
5821 && fcode != BUILT_IN_EXECV
5822 && fcode != BUILT_IN_EXECLP
5823 && fcode != BUILT_IN_EXECLE
5824 && fcode != BUILT_IN_EXECVP
5825 && fcode != BUILT_IN_EXECVE
5826 && fcode != BUILT_IN_ALLOCA
5827 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5828 && fcode != BUILT_IN_FREE
5829 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5830 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5831 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5832 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5833 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5834 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5835 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5836 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5837 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5838 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5839 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5840 && fcode != BUILT_IN_CHKP_BNDRET)
5841 return expand_call (exp, target, ignore);
5843 /* The built-in function expanders test for target == const0_rtx
5844 to determine whether the function's result will be ignored. */
5845 if (ignore)
5846 target = const0_rtx;
5848 /* If the result of a pure or const built-in function is ignored, and
5849 none of its arguments are volatile, we can avoid expanding the
5850 built-in call and just evaluate the arguments for side-effects. */
5851 if (target == const0_rtx
5852 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5853 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5855 bool volatilep = false;
5856 tree arg;
5857 call_expr_arg_iterator iter;
5859 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5860 if (TREE_THIS_VOLATILE (arg))
5862 volatilep = true;
5863 break;
5866 if (! volatilep)
5868 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5869 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5870 return const0_rtx;
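/* Editor's illustrative note (not in the original source) for the
   shortcut above: a const/pure call whose value is ignored keeps only
   its argument side effects, e.g.

     (void) __builtin_labs (i++);   // expands to just i++

   while any volatile argument forces the full call to be emitted.  */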
5874 /* expand_builtin_with_bounds is supposed to be used for
5875 instrumented builtin calls. */
5876 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5878 switch (fcode)
5880 CASE_FLT_FN (BUILT_IN_FABS):
5881 case BUILT_IN_FABSD32:
5882 case BUILT_IN_FABSD64:
5883 case BUILT_IN_FABSD128:
5884 target = expand_builtin_fabs (exp, target, subtarget);
5885 if (target)
5886 return target;
5887 break;
5889 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5890 target = expand_builtin_copysign (exp, target, subtarget);
5891 if (target)
5892 return target;
5893 break;
5895 /* Just do a normal library call if we were unable to fold
5896 the values. */
5897 CASE_FLT_FN (BUILT_IN_CABS):
5898 break;
5900 CASE_FLT_FN (BUILT_IN_FMA):
5901 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5902 if (target)
5903 return target;
5904 break;
5906 CASE_FLT_FN (BUILT_IN_ILOGB):
5907 if (! flag_unsafe_math_optimizations)
5908 break;
5909 CASE_FLT_FN (BUILT_IN_ISINF):
5910 CASE_FLT_FN (BUILT_IN_FINITE):
5911 case BUILT_IN_ISFINITE:
5912 case BUILT_IN_ISNORMAL:
5913 target = expand_builtin_interclass_mathfn (exp, target);
5914 if (target)
5915 return target;
5916 break;
5918 CASE_FLT_FN (BUILT_IN_ICEIL):
5919 CASE_FLT_FN (BUILT_IN_LCEIL):
5920 CASE_FLT_FN (BUILT_IN_LLCEIL):
5921 CASE_FLT_FN (BUILT_IN_LFLOOR):
5922 CASE_FLT_FN (BUILT_IN_IFLOOR):
5923 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5924 target = expand_builtin_int_roundingfn (exp, target);
5925 if (target)
5926 return target;
5927 break;
5929 CASE_FLT_FN (BUILT_IN_IRINT):
5930 CASE_FLT_FN (BUILT_IN_LRINT):
5931 CASE_FLT_FN (BUILT_IN_LLRINT):
5932 CASE_FLT_FN (BUILT_IN_IROUND):
5933 CASE_FLT_FN (BUILT_IN_LROUND):
5934 CASE_FLT_FN (BUILT_IN_LLROUND):
5935 target = expand_builtin_int_roundingfn_2 (exp, target);
5936 if (target)
5937 return target;
5938 break;
5940 CASE_FLT_FN (BUILT_IN_POWI):
5941 target = expand_builtin_powi (exp, target);
5942 if (target)
5943 return target;
5944 break;
5946 CASE_FLT_FN (BUILT_IN_CEXPI):
5947 target = expand_builtin_cexpi (exp, target);
5948 gcc_assert (target);
5949 return target;
5951 CASE_FLT_FN (BUILT_IN_SIN):
5952 CASE_FLT_FN (BUILT_IN_COS):
5953 if (! flag_unsafe_math_optimizations)
5954 break;
5955 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5956 if (target)
5957 return target;
5958 break;
5960 CASE_FLT_FN (BUILT_IN_SINCOS):
5961 if (! flag_unsafe_math_optimizations)
5962 break;
5963 target = expand_builtin_sincos (exp);
5964 if (target)
5965 return target;
5966 break;
5968 case BUILT_IN_APPLY_ARGS:
5969 return expand_builtin_apply_args ();
5971 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5972 FUNCTION with a copy of the parameters described by
5973 ARGUMENTS, and ARGSIZE. It returns a block of memory
5974 allocated on the stack into which is stored all the registers
5975 that might possibly be used for returning the result of a
5976 function. ARGUMENTS is the value returned by
5977 __builtin_apply_args. ARGSIZE is the number of bytes of
5978 arguments that must be copied. ??? How should this value be
5979 computed? We'll also need a safe worst case value for varargs
5980 functions. */
5981 case BUILT_IN_APPLY:
5982 if (!validate_arglist (exp, POINTER_TYPE,
5983 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5984 && !validate_arglist (exp, REFERENCE_TYPE,
5985 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5986 return const0_rtx;
5987 else
5989 rtx ops[3];
5991 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5992 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5993 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5995 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5998 /* __builtin_return (RESULT) causes the function to return the
5999 value described by RESULT. RESULT is address of the block of
6000 memory returned by __builtin_apply. */
6001 case BUILT_IN_RETURN:
6002 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6003 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6004 return const0_rtx;
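/* Editor's illustrative sketch (not in the original source) of the
   apply/return machinery handled above, a call-forwarding stub:

     void target_fn (int, ...);

     void forwarder (int n, ...)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);	 // hand back target_fn's return value
     }

   where 64 is the caller's guess at the worst-case argument size the
   comment above asks about.  */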
6006 case BUILT_IN_SAVEREGS:
6007 return expand_builtin_saveregs ();
6009 case BUILT_IN_VA_ARG_PACK:
6010 /* All valid uses of __builtin_va_arg_pack () are removed during
6011 inlining. */
6012 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6013 return const0_rtx;
6015 case BUILT_IN_VA_ARG_PACK_LEN:
6016 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6017 inlining. */
6018 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6019 return const0_rtx;
6021 /* Return the address of the first anonymous stack arg. */
6022 case BUILT_IN_NEXT_ARG:
6023 if (fold_builtin_next_arg (exp, false))
6024 return const0_rtx;
6025 return expand_builtin_next_arg ();
6027 case BUILT_IN_CLEAR_CACHE:
6028 target = expand_builtin___clear_cache (exp);
6029 if (target)
6030 return target;
6031 break;
6033 case BUILT_IN_CLASSIFY_TYPE:
6034 return expand_builtin_classify_type (exp);
6036 case BUILT_IN_CONSTANT_P:
6037 return const0_rtx;
6039 case BUILT_IN_FRAME_ADDRESS:
6040 case BUILT_IN_RETURN_ADDRESS:
6041 return expand_builtin_frame_address (fndecl, exp);
6043 /* Returns the address of the area where the structure is returned.
6044 0 otherwise. */
6045 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6046 if (call_expr_nargs (exp) != 0
6047 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6048 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6049 return const0_rtx;
6050 else
6051 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6053 case BUILT_IN_ALLOCA:
6054 case BUILT_IN_ALLOCA_WITH_ALIGN:
6055 /* If the allocation stems from the declaration of a variable-sized
6056 object, it cannot accumulate. */
6057 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6058 if (target)
6059 return target;
6060 break;
6062 case BUILT_IN_STACK_SAVE:
6063 return expand_stack_save ();
6065 case BUILT_IN_STACK_RESTORE:
6066 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6067 return const0_rtx;
6069 case BUILT_IN_BSWAP16:
6070 case BUILT_IN_BSWAP32:
6071 case BUILT_IN_BSWAP64:
6072 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6073 if (target)
6074 return target;
6075 break;
6077 CASE_INT_FN (BUILT_IN_FFS):
6078 target = expand_builtin_unop (target_mode, exp, target,
6079 subtarget, ffs_optab);
6080 if (target)
6081 return target;
6082 break;
6084 CASE_INT_FN (BUILT_IN_CLZ):
6085 target = expand_builtin_unop (target_mode, exp, target,
6086 subtarget, clz_optab);
6087 if (target)
6088 return target;
6089 break;
6091 CASE_INT_FN (BUILT_IN_CTZ):
6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, ctz_optab);
6094 if (target)
6095 return target;
6096 break;
6098 CASE_INT_FN (BUILT_IN_CLRSB):
6099 target = expand_builtin_unop (target_mode, exp, target,
6100 subtarget, clrsb_optab);
6101 if (target)
6102 return target;
6103 break;
6105 CASE_INT_FN (BUILT_IN_POPCOUNT):
6106 target = expand_builtin_unop (target_mode, exp, target,
6107 subtarget, popcount_optab);
6108 if (target)
6109 return target;
6110 break;
6112 CASE_INT_FN (BUILT_IN_PARITY):
6113 target = expand_builtin_unop (target_mode, exp, target,
6114 subtarget, parity_optab);
6115 if (target)
6116 return target;
6117 break;
6119 case BUILT_IN_STRLEN:
6120 target = expand_builtin_strlen (exp, target, target_mode);
6121 if (target)
6122 return target;
6123 break;
6125 case BUILT_IN_STRCPY:
6126 target = expand_builtin_strcpy (exp, target);
6127 if (target)
6128 return target;
6129 break;
6131 case BUILT_IN_STRNCPY:
6132 target = expand_builtin_strncpy (exp, target);
6133 if (target)
6134 return target;
6135 break;
6137 case BUILT_IN_STPCPY:
6138 target = expand_builtin_stpcpy (exp, target, mode);
6139 if (target)
6140 return target;
6141 break;
6143 case BUILT_IN_MEMCPY:
6144 target = expand_builtin_memcpy (exp, target);
6145 if (target)
6146 return target;
6147 break;
6149 case BUILT_IN_MEMPCPY:
6150 target = expand_builtin_mempcpy (exp, target, mode);
6151 if (target)
6152 return target;
6153 break;
6155 case BUILT_IN_MEMSET:
6156 target = expand_builtin_memset (exp, target, mode);
6157 if (target)
6158 return target;
6159 break;
6161 case BUILT_IN_BZERO:
6162 target = expand_builtin_bzero (exp);
6163 if (target)
6164 return target;
6165 break;
6167 case BUILT_IN_STRCMP:
6168 target = expand_builtin_strcmp (exp, target);
6169 if (target)
6170 return target;
6171 break;
6173 case BUILT_IN_STRNCMP:
6174 target = expand_builtin_strncmp (exp, target, mode);
6175 if (target)
6176 return target;
6177 break;
6179 case BUILT_IN_BCMP:
6180 case BUILT_IN_MEMCMP:
6181 case BUILT_IN_MEMCMP_EQ:
6182 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6183 if (target)
6184 return target;
6185 if (fcode == BUILT_IN_MEMCMP_EQ)
6187 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6188 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6190 break;
6192 case BUILT_IN_SETJMP:
6193 /* This should have been lowered to the builtins below. */
6194 gcc_unreachable ();
6196 case BUILT_IN_SETJMP_SETUP:
6197 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6198 and the receiver label. */
6199 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6201 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6202 VOIDmode, EXPAND_NORMAL);
6203 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6204 rtx_insn *label_r = label_rtx (label);
6206 /* This is copied from the handling of non-local gotos. */
6207 expand_builtin_setjmp_setup (buf_addr, label_r);
6208 nonlocal_goto_handler_labels
6209 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6210 nonlocal_goto_handler_labels);
6211 /* ??? Do not let expand_label treat us as such since we would
6212 not want to be both on the list of non-local labels and on
6213 the list of forced labels. */
6214 FORCED_LABEL (label) = 0;
6215 return const0_rtx;
6217 break;
6219 case BUILT_IN_SETJMP_RECEIVER:
6220 /* __builtin_setjmp_receiver is passed the receiver label. */
6221 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6223 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6224 rtx_insn *label_r = label_rtx (label);
6226 expand_builtin_setjmp_receiver (label_r);
6227 return const0_rtx;
6229 break;
6231 /* __builtin_longjmp is passed a pointer to an array of five words.
6232 It's similar to the C library longjmp function but works with
6233 __builtin_setjmp above. */
6234 case BUILT_IN_LONGJMP:
6235 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6237 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6238 VOIDmode, EXPAND_NORMAL);
6239 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6241 if (value != const1_rtx)
6243 error ("%<__builtin_longjmp%> second argument must be 1");
6244 return const0_rtx;
6247 expand_builtin_longjmp (buf_addr, value);
6248 return const0_rtx;
6250 break;
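/* Editor's illustrative sketch (not in the original source): the
   builtin setjmp family above uses a five-word buffer, distinct from
   the C library's jmp_buf:

     static void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);   // second argument must be 1

   __builtin_setjmp itself never reaches this expander; it is lowered
   earlier to the _SETUP/_RECEIVER pair handled above.  */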
6252 case BUILT_IN_NONLOCAL_GOTO:
6253 target = expand_builtin_nonlocal_goto (exp);
6254 if (target)
6255 return target;
6256 break;
6258 /* This updates the setjmp buffer that is its argument with the value
6259 of the current stack pointer. */
6260 case BUILT_IN_UPDATE_SETJMP_BUF:
6261 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6263 rtx buf_addr
6264 = expand_normal (CALL_EXPR_ARG (exp, 0));
6266 expand_builtin_update_setjmp_buf (buf_addr);
6267 return const0_rtx;
6269 break;
6271 case BUILT_IN_TRAP:
6272 expand_builtin_trap ();
6273 return const0_rtx;
6275 case BUILT_IN_UNREACHABLE:
6276 expand_builtin_unreachable ();
6277 return const0_rtx;
6279 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6280 case BUILT_IN_SIGNBITD32:
6281 case BUILT_IN_SIGNBITD64:
6282 case BUILT_IN_SIGNBITD128:
6283 target = expand_builtin_signbit (exp, target);
6284 if (target)
6285 return target;
6286 break;
6288 /* Various hooks for the DWARF 2 __throw routine. */
6289 case BUILT_IN_UNWIND_INIT:
6290 expand_builtin_unwind_init ();
6291 return const0_rtx;
6292 case BUILT_IN_DWARF_CFA:
6293 return virtual_cfa_rtx;
6294 #ifdef DWARF2_UNWIND_INFO
6295 case BUILT_IN_DWARF_SP_COLUMN:
6296 return expand_builtin_dwarf_sp_column ();
6297 case BUILT_IN_INIT_DWARF_REG_SIZES:
6298 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6299 return const0_rtx;
6300 #endif
6301 case BUILT_IN_FROB_RETURN_ADDR:
6302 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6303 case BUILT_IN_EXTRACT_RETURN_ADDR:
6304 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6305 case BUILT_IN_EH_RETURN:
6306 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6307 CALL_EXPR_ARG (exp, 1));
6308 return const0_rtx;
6309 case BUILT_IN_EH_RETURN_DATA_REGNO:
6310 return expand_builtin_eh_return_data_regno (exp);
6311 case BUILT_IN_EXTEND_POINTER:
6312 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6313 case BUILT_IN_EH_POINTER:
6314 return expand_builtin_eh_pointer (exp);
6315 case BUILT_IN_EH_FILTER:
6316 return expand_builtin_eh_filter (exp);
6317 case BUILT_IN_EH_COPY_VALUES:
6318 return expand_builtin_eh_copy_values (exp);
6320 case BUILT_IN_VA_START:
6321 return expand_builtin_va_start (exp);
6322 case BUILT_IN_VA_END:
6323 return expand_builtin_va_end (exp);
6324 case BUILT_IN_VA_COPY:
6325 return expand_builtin_va_copy (exp);
6326 case BUILT_IN_EXPECT:
6327 return expand_builtin_expect (exp, target);
6328 case BUILT_IN_ASSUME_ALIGNED:
6329 return expand_builtin_assume_aligned (exp, target);
6330 case BUILT_IN_PREFETCH:
6331 expand_builtin_prefetch (exp);
6332 return const0_rtx;
6334 case BUILT_IN_INIT_TRAMPOLINE:
6335 return expand_builtin_init_trampoline (exp, true);
6336 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6337 return expand_builtin_init_trampoline (exp, false);
6338 case BUILT_IN_ADJUST_TRAMPOLINE:
6339 return expand_builtin_adjust_trampoline (exp);
6341 case BUILT_IN_FORK:
6342 case BUILT_IN_EXECL:
6343 case BUILT_IN_EXECV:
6344 case BUILT_IN_EXECLP:
6345 case BUILT_IN_EXECLE:
6346 case BUILT_IN_EXECVP:
6347 case BUILT_IN_EXECVE:
6348 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6349 if (target)
6350 return target;
6351 break;
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6354 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6355 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6356 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6357 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6358 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6359 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6360 if (target)
6361 return target;
6362 break;
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6365 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6366 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6367 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6368 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6369 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6370 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6371 if (target)
6372 return target;
6373 break;
6375 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6376 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6377 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6378 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6379 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6380 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6381 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6382 if (target)
6383 return target;
6384 break;
6386 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6387 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6388 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6389 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6390 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6391 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6392 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6393 if (target)
6394 return target;
6395 break;
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6398 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6399 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6400 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6401 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6402 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6403 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6404 if (target)
6405 return target;
6406 break;
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6409 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6410 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6411 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6412 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6413 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6414 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6415 if (target)
6416 return target;
6417 break;
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6420 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6421 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6422 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6423 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6424 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6425 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6426 if (target)
6427 return target;
6428 break;
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6431 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6432 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6433 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6434 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6435 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6436 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6437 if (target)
6438 return target;
6439 break;
6441 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6442 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6443 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6444 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6445 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6446 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6447 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6448 if (target)
6449 return target;
6450 break;
6452 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6453 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6454 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6455 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6456 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6457 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6458 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6459 if (target)
6460 return target;
6461 break;
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6464 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6465 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6466 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6467 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6468 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6469 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6470 if (target)
6471 return target;
6472 break;
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6475 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6476 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6477 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6478 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6479 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6480 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6481 if (target)
6482 return target;
6483 break;
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6486 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6487 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6488 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6489 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6490 if (mode == VOIDmode)
6491 mode = TYPE_MODE (boolean_type_node);
6492 if (!target || !register_operand (target, mode))
6493 target = gen_reg_rtx (mode);
6495 mode = get_builtin_sync_mode
6496 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6497 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6498 if (target)
6499 return target;
6500 break;
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6503 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6504 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6505 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6506 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6507 mode = get_builtin_sync_mode
6508 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6509 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6510 if (target)
6511 return target;
6512 break;
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6515 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6516 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6517 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6518 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6519 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6520 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6521 if (target)
6522 return target;
6523 break;
6525 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6526 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6527 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6528 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6529 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6530 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6531 expand_builtin_sync_lock_release (mode, exp);
6532 return const0_rtx;
6534 case BUILT_IN_SYNC_SYNCHRONIZE:
6535 expand_builtin_sync_synchronize ();
6536 return const0_rtx;
6538 case BUILT_IN_ATOMIC_EXCHANGE_1:
6539 case BUILT_IN_ATOMIC_EXCHANGE_2:
6540 case BUILT_IN_ATOMIC_EXCHANGE_4:
6541 case BUILT_IN_ATOMIC_EXCHANGE_8:
6542 case BUILT_IN_ATOMIC_EXCHANGE_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6544 target = expand_builtin_atomic_exchange (mode, exp, target);
6545 if (target)
6546 return target;
6547 break;
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6551 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6552 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6553 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6555 unsigned int nargs, z;
6556 vec<tree, va_gc> *vec;
6558 mode =
6559 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6560 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6561 if (target)
6562 return target;
6564 /* If this is turned into an external library call, the weak parameter
6565 must be dropped to match the expected parameter list. */
6566 nargs = call_expr_nargs (exp);
6567 vec_alloc (vec, nargs - 1);
6568 for (z = 0; z < 3; z++)
6569 vec->quick_push (CALL_EXPR_ARG (exp, z));
6570 /* Skip the boolean weak parameter. */
6571 for (z = 4; z < 6; z++)
6572 vec->quick_push (CALL_EXPR_ARG (exp, z));
6573 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6574 break;
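/* Editor's sketch (not in the original source) of the parameter
   dropping above: the builtin form carries a weak flag that the
   library entry point lacks, roughly

     bool __atomic_compare_exchange_4 (T *ptr, T *expected, T desired,
				       bool weak,	// builtin only
				       int success, int failure);

   so arguments 0-2 and 4-5 are kept and argument 3 is skipped when the
   call is rebuilt for the library.  */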
6577 case BUILT_IN_ATOMIC_LOAD_1:
6578 case BUILT_IN_ATOMIC_LOAD_2:
6579 case BUILT_IN_ATOMIC_LOAD_4:
6580 case BUILT_IN_ATOMIC_LOAD_8:
6581 case BUILT_IN_ATOMIC_LOAD_16:
6582 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6583 target = expand_builtin_atomic_load (mode, exp, target);
6584 if (target)
6585 return target;
6586 break;
6588 case BUILT_IN_ATOMIC_STORE_1:
6589 case BUILT_IN_ATOMIC_STORE_2:
6590 case BUILT_IN_ATOMIC_STORE_4:
6591 case BUILT_IN_ATOMIC_STORE_8:
6592 case BUILT_IN_ATOMIC_STORE_16:
6593 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6594 target = expand_builtin_atomic_store (mode, exp);
6595 if (target)
6596 return const0_rtx;
6597 break;
6599 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6600 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6601 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6602 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6603 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6605 enum built_in_function lib;
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6607 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6608 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6609 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6610 ignore, lib);
6611 if (target)
6612 return target;
6613 break;
6615 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6616 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6617 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6618 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6619 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6621 enum built_in_function lib;
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6623 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6624 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6625 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6626 ignore, lib);
6627 if (target)
6628 return target;
6629 break;
6631 case BUILT_IN_ATOMIC_AND_FETCH_1:
6632 case BUILT_IN_ATOMIC_AND_FETCH_2:
6633 case BUILT_IN_ATOMIC_AND_FETCH_4:
6634 case BUILT_IN_ATOMIC_AND_FETCH_8:
6635 case BUILT_IN_ATOMIC_AND_FETCH_16:
6637 enum built_in_function lib;
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6639 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6640 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6641 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6642 ignore, lib);
6643 if (target)
6644 return target;
6645 break;
6647 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6648 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6649 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6650 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6651 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6653 enum built_in_function lib;
6654 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6655 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6656 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6657 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6658 ignore, lib);
6659 if (target)
6660 return target;
6661 break;
6663 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6664 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6665 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6666 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6667 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6669 enum built_in_function lib;
6670 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6671 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6672 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6673 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6674 ignore, lib);
6675 if (target)
6676 return target;
6677 break;
6679 case BUILT_IN_ATOMIC_OR_FETCH_1:
6680 case BUILT_IN_ATOMIC_OR_FETCH_2:
6681 case BUILT_IN_ATOMIC_OR_FETCH_4:
6682 case BUILT_IN_ATOMIC_OR_FETCH_8:
6683 case BUILT_IN_ATOMIC_OR_FETCH_16:
6685 enum built_in_function lib;
6686 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6687 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6688 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6689 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6690 ignore, lib);
6691 if (target)
6692 return target;
6693 break;
6695 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6696 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6697 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6698 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6699 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6700 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6701 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6702 ignore, BUILT_IN_NONE);
6703 if (target)
6704 return target;
6705 break;
6707 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6708 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6709 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6710 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6711 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6713 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6714 ignore, BUILT_IN_NONE);
6715 if (target)
6716 return target;
6717 break;
6719 case BUILT_IN_ATOMIC_FETCH_AND_1:
6720 case BUILT_IN_ATOMIC_FETCH_AND_2:
6721 case BUILT_IN_ATOMIC_FETCH_AND_4:
6722 case BUILT_IN_ATOMIC_FETCH_AND_8:
6723 case BUILT_IN_ATOMIC_FETCH_AND_16:
6724 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6725 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6726 ignore, BUILT_IN_NONE);
6727 if (target)
6728 return target;
6729 break;
6731 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6732 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6733 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6734 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6735 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6737 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6738 ignore, BUILT_IN_NONE);
6739 if (target)
6740 return target;
6741 break;
6743 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6744 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6745 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6746 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6747 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6748 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6749 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6750 ignore, BUILT_IN_NONE);
6751 if (target)
6752 return target;
6753 break;
6755 case BUILT_IN_ATOMIC_FETCH_OR_1:
6756 case BUILT_IN_ATOMIC_FETCH_OR_2:
6757 case BUILT_IN_ATOMIC_FETCH_OR_4:
6758 case BUILT_IN_ATOMIC_FETCH_OR_8:
6759 case BUILT_IN_ATOMIC_FETCH_OR_16:
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6761 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6762 ignore, BUILT_IN_NONE);
6763 if (target)
6764 return target;
6765 break;
6767 case BUILT_IN_ATOMIC_TEST_AND_SET:
6768 return expand_builtin_atomic_test_and_set (exp, target);
6770 case BUILT_IN_ATOMIC_CLEAR:
6771 return expand_builtin_atomic_clear (exp);
6773 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6774 return expand_builtin_atomic_always_lock_free (exp);
6776 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6777 target = expand_builtin_atomic_is_lock_free (exp);
6778 if (target)
6779 return target;
6780 break;
6782 case BUILT_IN_ATOMIC_THREAD_FENCE:
6783 expand_builtin_atomic_thread_fence (exp);
6784 return const0_rtx;
6786 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6787 expand_builtin_atomic_signal_fence (exp);
6788 return const0_rtx;
6790 case BUILT_IN_OBJECT_SIZE:
6791 return expand_builtin_object_size (exp);
6793 case BUILT_IN_MEMCPY_CHK:
6794 case BUILT_IN_MEMPCPY_CHK:
6795 case BUILT_IN_MEMMOVE_CHK:
6796 case BUILT_IN_MEMSET_CHK:
6797 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6798 if (target)
6799 return target;
6800 break;
6802 case BUILT_IN_STRCPY_CHK:
6803 case BUILT_IN_STPCPY_CHK:
6804 case BUILT_IN_STRNCPY_CHK:
6805 case BUILT_IN_STPNCPY_CHK:
6806 case BUILT_IN_STRCAT_CHK:
6807 case BUILT_IN_STRNCAT_CHK:
6808 case BUILT_IN_SNPRINTF_CHK:
6809 case BUILT_IN_VSNPRINTF_CHK:
6810 maybe_emit_chk_warning (exp, fcode);
6811 break;
6813 case BUILT_IN_SPRINTF_CHK:
6814 case BUILT_IN_VSPRINTF_CHK:
6815 maybe_emit_sprintf_chk_warning (exp, fcode);
6816 break;
6818 case BUILT_IN_FREE:
6819 if (warn_free_nonheap_object)
6820 maybe_emit_free_warning (exp);
6821 break;
6823 case BUILT_IN_THREAD_POINTER:
6824 return expand_builtin_thread_pointer (exp, target);
6826 case BUILT_IN_SET_THREAD_POINTER:
6827 expand_builtin_set_thread_pointer (exp);
6828 return const0_rtx;
6830 case BUILT_IN_CILK_DETACH:
6831 expand_builtin_cilk_detach (exp);
6832 return const0_rtx;
6834 case BUILT_IN_CILK_POP_FRAME:
6835 expand_builtin_cilk_pop_frame (exp);
6836 return const0_rtx;
6838 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6842 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6843 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6844 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6845 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6846 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6847 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6848 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6849 /* We allow user CHKP builtins if Pointer Bounds
6850 Checker is off. */
6851 if (!chkp_function_instrumented_p (current_function_decl))
6853 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6854 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6855 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6856 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6857 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6858 return expand_normal (CALL_EXPR_ARG (exp, 0));
6859 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6860 return expand_normal (size_zero_node);
6861 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6862 return expand_normal (size_int (-1));
6863 else
6864 return const0_rtx;
6866 /* FALLTHROUGH */
6868 case BUILT_IN_CHKP_BNDMK:
6869 case BUILT_IN_CHKP_BNDSTX:
6870 case BUILT_IN_CHKP_BNDCL:
6871 case BUILT_IN_CHKP_BNDCU:
6872 case BUILT_IN_CHKP_BNDLDX:
6873 case BUILT_IN_CHKP_BNDRET:
6874 case BUILT_IN_CHKP_INTERSECT:
6875 case BUILT_IN_CHKP_NARROW:
6876 case BUILT_IN_CHKP_EXTRACT_LOWER:
6877 case BUILT_IN_CHKP_EXTRACT_UPPER:
6878 /* A software implementation of Pointer Bounds Checker is not yet
6879 implemented; target support is required. */
6880 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
6881 break;
6883 case BUILT_IN_ACC_ON_DEVICE:
6884 /* Do library call, if we failed to expand the builtin when
6885 folding. */
6886 break;
6888 default: /* just do library call, if unknown builtin */
6889 break;
6892 /* The switch statement above can drop through to cause the function
6893 to be called normally. */
6894 return expand_call (exp, target, ignore);
6897 /* Similar to expand_builtin but is used for instrumented calls. */
6899 rtx
6900 expand_builtin_with_bounds (tree exp, rtx target,
6901 rtx subtarget ATTRIBUTE_UNUSED,
6902 machine_mode mode, int ignore)
6904 tree fndecl = get_callee_fndecl (exp);
6905 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6907 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6909 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6910 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6912 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6913 && fcode < END_CHKP_BUILTINS);
6915 switch (fcode)
6917 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6918 target = expand_builtin_memcpy_with_bounds (exp, target);
6919 if (target)
6920 return target;
6921 break;
6923 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6924 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6925 if (target)
6926 return target;
6927 break;
6929 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6930 target = expand_builtin_memset_with_bounds (exp, target, mode);
6931 if (target)
6932 return target;
6933 break;
6935 default:
6936 break;
6939 /* The switch statement above can drop through to cause the function
6940 to be called normally. */
6941 return expand_call (exp, target, ignore);
6944 /* Determine whether a tree node represents a call to a built-in
6945 function. If the tree T is a call to a built-in function with
6946 the right number of arguments of the appropriate types, return
6947 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6948 Otherwise the return value is END_BUILTINS. */
6950 enum built_in_function
6951 builtin_mathfn_code (const_tree t)
6953 const_tree fndecl, arg, parmlist;
6954 const_tree argtype, parmtype;
6955 const_call_expr_arg_iterator iter;
6957 if (TREE_CODE (t) != CALL_EXPR
6958 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6959 return END_BUILTINS;
6961 fndecl = get_callee_fndecl (t);
6962 if (fndecl == NULL_TREE
6963 || TREE_CODE (fndecl) != FUNCTION_DECL
6964 || ! DECL_BUILT_IN (fndecl)
6965 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6966 return END_BUILTINS;
6968 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6969 init_const_call_expr_arg_iterator (t, &iter);
6970 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6972 /* If a function doesn't take a variable number of arguments,
6973 the last element in the list will have type `void'. */
6974 parmtype = TREE_VALUE (parmlist);
6975 if (VOID_TYPE_P (parmtype))
6977 if (more_const_call_expr_args_p (&iter))
6978 return END_BUILTINS;
6979 return DECL_FUNCTION_CODE (fndecl);
6982 if (! more_const_call_expr_args_p (&iter))
6983 return END_BUILTINS;
6985 arg = next_const_call_expr_arg (&iter);
6986 argtype = TREE_TYPE (arg);
6988 if (SCALAR_FLOAT_TYPE_P (parmtype))
6990 if (! SCALAR_FLOAT_TYPE_P (argtype))
6991 return END_BUILTINS;
6993 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6995 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6996 return END_BUILTINS;
6998 else if (POINTER_TYPE_P (parmtype))
7000 if (! POINTER_TYPE_P (argtype))
7001 return END_BUILTINS;
7003 else if (INTEGRAL_TYPE_P (parmtype))
7005 if (! INTEGRAL_TYPE_P (argtype))
7006 return END_BUILTINS;
7008 else
7009 return END_BUILTINS;
7012 /* Variable-length argument list. */
7013 return DECL_FUNCTION_CODE (fndecl);
7016 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7017 evaluate to a constant. */
7019 static tree
7020 fold_builtin_constant_p (tree arg)
7022 /* We return 1 for a numeric type that's known to be a constant
7023 value at compile-time or for an aggregate type that's a
7024 literal constant. */
7025 STRIP_NOPS (arg);
7027 /* If we know this is a constant, fold to the constant 1. */
7028 if (CONSTANT_CLASS_P (arg)
7029 || (TREE_CODE (arg) == CONSTRUCTOR
7030 && TREE_CONSTANT (arg)))
7031 return integer_one_node;
7032 if (TREE_CODE (arg) == ADDR_EXPR)
7034 tree op = TREE_OPERAND (arg, 0);
7035 if (TREE_CODE (op) == STRING_CST
7036 || (TREE_CODE (op) == ARRAY_REF
7037 && integer_zerop (TREE_OPERAND (op, 1))
7038 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7039 return integer_one_node;
7042 /* If this expression has side effects, show we don't know it to be a
7043 constant. Likewise if it's a pointer or aggregate type since in
7044 those cases we only want literals, since those are only optimized
7045 when generating RTL, not later.
7046 And finally, if we are compiling an initializer, not code, we
7047 need to return a definite result now; there's not going to be any
7048 more optimization done. */
7049 if (TREE_SIDE_EFFECTS (arg)
7050 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7051 || POINTER_TYPE_P (TREE_TYPE (arg))
7052 || cfun == 0
7053 || folding_initializer
7054 || force_folding_builtin_constant_p)
7055 return integer_zero_node;
7057 return NULL_TREE;
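/* Editor's worked sketch (not in the original source) of the folding
   rules above:

     __builtin_constant_p (42)	      // 1: CONSTANT_CLASS_P
     __builtin_constant_p ("abc")     // 1: ADDR_EXPR of a STRING_CST
     __builtin_constant_p (i++)	      // 0: side effects
     __builtin_constant_p (i)	      // NULL_TREE: deferred so later
				      // passes may still prove it
*/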
7060 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7061 return it as a truthvalue. */
7063 static tree
7064 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7065 tree predictor)
7067 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7069 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7070 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7071 ret_type = TREE_TYPE (TREE_TYPE (fn));
7072 pred_type = TREE_VALUE (arg_types);
7073 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7075 pred = fold_convert_loc (loc, pred_type, pred);
7076 expected = fold_convert_loc (loc, expected_type, expected);
7077 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7078 predictor);
7080 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7081 build_int_cst (ret_type, 0));
7084 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7085 NULL_TREE if no simplification is possible. */
7087 tree
7088 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7090 tree inner, fndecl, inner_arg0;
7091 enum tree_code code;
7093 /* Distribute the expected value over short-circuiting operators.
7094 See through the cast from truthvalue_type_node to long. */
7095 inner_arg0 = arg0;
7096 while (CONVERT_EXPR_P (inner_arg0)
7097 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7098 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7099 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7101 /* If this is a builtin_expect within a builtin_expect keep the
7102 inner one. See through a comparison against a constant. It
7103 might have been added to create a truthvalue. */
7104 inner = inner_arg0;
7106 if (COMPARISON_CLASS_P (inner)
7107 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7108 inner = TREE_OPERAND (inner, 0);
7110 if (TREE_CODE (inner) == CALL_EXPR
7111 && (fndecl = get_callee_fndecl (inner))
7112 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7113 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7114 return arg0;
7116 inner = inner_arg0;
7117 code = TREE_CODE (inner);
7118 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7120 tree op0 = TREE_OPERAND (inner, 0);
7121 tree op1 = TREE_OPERAND (inner, 1);
7123 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7124 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7125 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7127 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7130 /* If the argument isn't invariant then there's nothing else we can do. */
7131 if (!TREE_CONSTANT (inner_arg0))
7132 return NULL_TREE;
7134 /* If we expect that a comparison against the argument will fold to
7135 a constant return the constant. In practice, this means a true
7136 constant or the address of a non-weak symbol. */
7137 inner = inner_arg0;
7138 STRIP_NOPS (inner);
7139 if (TREE_CODE (inner) == ADDR_EXPR)
7140 {
7141 do
7142 {
7143 inner = TREE_OPERAND (inner, 0);
7144 }
7145 while (TREE_CODE (inner) == COMPONENT_REF
7146 || TREE_CODE (inner) == ARRAY_REF);
7147 if ((TREE_CODE (inner) == VAR_DECL
7148 || TREE_CODE (inner) == FUNCTION_DECL)
7149 && DECL_WEAK (inner))
7150 return NULL_TREE;
7151 }
7153 /* Otherwise, ARG0 already has the proper type for the return value. */
7154 return arg0;
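/* Worked example (editor's note, not part of the original source): for
   __builtin_expect (a && b, 1), the TRUTH_ANDIF_EXPR case above
   distributes the hint over both operands, building roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   with each half produced by build_builtin_expect_predicate and the
   result converted back to the type of the original argument.  */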
7157 /* Fold a call to __builtin_classify_type with argument ARG. */
7159 static tree
7160 fold_builtin_classify_type (tree arg)
7162 if (arg == 0)
7163 return build_int_cst (integer_type_node, no_type_class);
7165 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7168 /* Fold a call to __builtin_strlen with argument ARG. */
7170 static tree
7171 fold_builtin_strlen (location_t loc, tree type, tree arg)
7173 if (!validate_arg (arg, POINTER_TYPE))
7174 return NULL_TREE;
7175 else
7177 tree len = c_strlen (arg, 0);
7179 if (len)
7180 return fold_convert_loc (loc, type, len);
7182 return NULL_TREE;
7186 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7188 static tree
7189 fold_builtin_inf (location_t loc, tree type, int warn)
7191 REAL_VALUE_TYPE real;
7193 /* __builtin_inff is intended to be usable to define INFINITY on all
7194 targets. If an infinity is not available, INFINITY expands "to a
7195 positive constant of type float that overflows at translation
7196 time", footnote "In this case, using INFINITY will violate the
7197 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7198 Thus we pedwarn to ensure this constraint violation is
7199 diagnosed. */
7200 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7201 pedwarn (loc, 0, "target format does not support infinity");
7203 real_inf (&real);
7204 return build_real (type, real);
7207 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7208 NULL_TREE if no simplification can be made. */
7210 static tree
7211 fold_builtin_sincos (location_t loc,
7212 tree arg0, tree arg1, tree arg2)
7214 tree type;
7215 tree fndecl, call = NULL_TREE;
7217 if (!validate_arg (arg0, REAL_TYPE)
7218 || !validate_arg (arg1, POINTER_TYPE)
7219 || !validate_arg (arg2, POINTER_TYPE))
7220 return NULL_TREE;
7222 type = TREE_TYPE (arg0);
7224 /* Canonicalize sincos to cexpi. */
7225 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7226 if (fn == END_BUILTINS)
7227 return NULL_TREE;
7229 /* Calculate the result when the argument is a constant. */
7230 if (TREE_CODE (arg0) == REAL_CST)
7232 tree complex_type = build_complex_type (type);
7233 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7235 if (!call)
7237 if (!targetm.libc_has_function (function_c99_math_complex)
7238 || !builtin_decl_implicit_p (fn))
7239 return NULL_TREE;
7240 fndecl = builtin_decl_explicit (fn);
7241 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7242 call = builtin_save_expr (call);
7245 return build2 (COMPOUND_EXPR, void_type_node,
7246 build2 (MODIFY_EXPR, void_type_node,
7247 build_fold_indirect_ref_loc (loc, arg1),
7248 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7249 build2 (MODIFY_EXPR, void_type_node,
7250 build_fold_indirect_ref_loc (loc, arg2),
7251 fold_build1_loc (loc, REALPART_EXPR, type, call)));
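/* Worked example (editor's note, not part of the original source): a call
   sincos (x, &s, &c) is rewritten above into a COMPOUND_EXPR of two
   stores, roughly

     s = __imag__ cexpi (x);    (i.e. sin (x))
     c = __real__ cexpi (x);    (i.e. cos (x))

   where cexpi (x) computes cos (x) + i*sin (x) and is evaluated only
   once thanks to builtin_save_expr.  */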
7254 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7255 arguments to the call, and TYPE is its return type.
7256 Return NULL_TREE if no simplification can be made. */
7258 static tree
7259 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7261 if (!validate_arg (arg1, POINTER_TYPE)
7262 || !validate_arg (arg2, INTEGER_TYPE)
7263 || !validate_arg (len, INTEGER_TYPE))
7264 return NULL_TREE;
7265 else
7267 const char *p1;
7269 if (TREE_CODE (arg2) != INTEGER_CST
7270 || !tree_fits_uhwi_p (len))
7271 return NULL_TREE;
7273 p1 = c_getstr (arg1);
7274 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7276 char c;
7277 const char *r;
7278 tree tem;
7280 if (target_char_cast (arg2, &c))
7281 return NULL_TREE;
7283 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7285 if (r == NULL)
7286 return build_int_cst (TREE_TYPE (arg1), 0);
7288 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7289 return fold_convert_loc (loc, type, tem);
7291 return NULL_TREE;
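/* Worked example (editor's note, not part of the original source): with a
   constant haystack and an in-bounds length, the call folds to a pointer
   constant, e.g.

     memchr ("hello", 'l', 5)  ->  "hello" + 2
     memchr ("hello", 'z', 5)  ->  (void *) 0

   assuming target_char_cast succeeds, i.e. the needle is representable
   as a target character.  */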
7295 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7296 Return NULL_TREE if no simplification can be made. */
7298 static tree
7299 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7301 if (!validate_arg (arg1, POINTER_TYPE)
7302 || !validate_arg (arg2, POINTER_TYPE)
7303 || !validate_arg (len, INTEGER_TYPE))
7304 return NULL_TREE;
7306 /* If the LEN parameter is zero, return zero. */
7307 if (integer_zerop (len))
7308 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7309 arg1, arg2);
7311 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7312 if (operand_equal_p (arg1, arg2, 0))
7313 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7315 /* If the LEN parameter is one, return an expression corresponding to
7316 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7317 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7319 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7320 tree cst_uchar_ptr_node
7321 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7323 tree ind1
7324 = fold_convert_loc (loc, integer_type_node,
7325 build1 (INDIRECT_REF, cst_uchar_node,
7326 fold_convert_loc (loc,
7327 cst_uchar_ptr_node,
7328 arg1)));
7329 tree ind2
7330 = fold_convert_loc (loc, integer_type_node,
7331 build1 (INDIRECT_REF, cst_uchar_node,
7332 fold_convert_loc (loc,
7333 cst_uchar_ptr_node,
7334 arg2)));
7335 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7338 return NULL_TREE;
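/* Worked example (editor's note, not part of the original source): the
   LEN == 1 case above folds

     memcmp (p, q, 1)  ->  (int) *(const unsigned char *) p
                           - (int) *(const unsigned char *) q

   which matches the required compare-as-unsigned-char semantics without
   a library call.  */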
7341 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7342 Return NULL_TREE if no simplification can be made. */
7344 static tree
7345 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7347 if (!validate_arg (arg1, POINTER_TYPE)
7348 || !validate_arg (arg2, POINTER_TYPE))
7349 return NULL_TREE;
7351 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7352 if (operand_equal_p (arg1, arg2, 0))
7353 return integer_zero_node;
7355 /* If the second arg is "", return *(const unsigned char*)arg1. */
7356 const char *p2 = c_getstr (arg2);
7357 if (p2 && *p2 == '\0')
7359 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7360 tree cst_uchar_ptr_node
7361 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7363 return fold_convert_loc (loc, integer_type_node,
7364 build1 (INDIRECT_REF, cst_uchar_node,
7365 fold_convert_loc (loc,
7366 cst_uchar_ptr_node,
7367 arg1)));
7370 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7371 const char *p1 = c_getstr (arg1);
7372 if (p1 && *p1 == '\0')
7374 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7375 tree cst_uchar_ptr_node
7376 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7378 tree temp
7379 = fold_convert_loc (loc, integer_type_node,
7380 build1 (INDIRECT_REF, cst_uchar_node,
7381 fold_convert_loc (loc,
7382 cst_uchar_ptr_node,
7383 arg2)));
7384 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7387 return NULL_TREE;
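/* Worked example (editor's note, not part of the original source):

     strcmp (s, "")  ->  (int) *(const unsigned char *) s
     strcmp ("", t)  ->  - (int) *(const unsigned char *) t

   both correct since strcmp (s, "") is just s[0] compared against the
   terminating '\0', as an unsigned char.  */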
7390 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7391 Return NULL_TREE if no simplification can be made. */
7393 static tree
7394 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7396 if (!validate_arg (arg1, POINTER_TYPE)
7397 || !validate_arg (arg2, POINTER_TYPE)
7398 || !validate_arg (len, INTEGER_TYPE))
7399 return NULL_TREE;
7401 /* If the LEN parameter is zero, return zero. */
7402 if (integer_zerop (len))
7403 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7404 arg1, arg2);
7406 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7407 if (operand_equal_p (arg1, arg2, 0))
7408 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7410 /* If the second arg is "", and the length is greater than zero,
7411 return *(const unsigned char*)arg1. */
7412 const char *p2 = c_getstr (arg2);
7413 if (p2 && *p2 == '\0'
7414 && TREE_CODE (len) == INTEGER_CST
7415 && tree_int_cst_sgn (len) == 1)
7417 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7418 tree cst_uchar_ptr_node
7419 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7421 return fold_convert_loc (loc, integer_type_node,
7422 build1 (INDIRECT_REF, cst_uchar_node,
7423 fold_convert_loc (loc,
7424 cst_uchar_ptr_node,
7425 arg1)));
7428 /* If the first arg is "", and the length is greater than zero,
7429 return -*(const unsigned char*)arg2. */
7430 const char *p1 = c_getstr (arg1);
7431 if (p1 && *p1 == '\0'
7432 && TREE_CODE (len) == INTEGER_CST
7433 && tree_int_cst_sgn (len) == 1)
7435 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7436 tree cst_uchar_ptr_node
7437 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7439 tree temp = fold_convert_loc (loc, integer_type_node,
7440 build1 (INDIRECT_REF, cst_uchar_node,
7441 fold_convert_loc (loc,
7442 cst_uchar_ptr_node,
7443 arg2)));
7444 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7447 /* If the LEN parameter is one, return an expression corresponding to
7448 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7449 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7451 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7452 tree cst_uchar_ptr_node
7453 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7455 tree ind1 = fold_convert_loc (loc, integer_type_node,
7456 build1 (INDIRECT_REF, cst_uchar_node,
7457 fold_convert_loc (loc,
7458 cst_uchar_ptr_node,
7459 arg1)));
7460 tree ind2 = fold_convert_loc (loc, integer_type_node,
7461 build1 (INDIRECT_REF, cst_uchar_node,
7462 fold_convert_loc (loc,
7463 cst_uchar_ptr_node,
7464 arg2)));
7465 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7468 return NULL_TREE;
7471 /* Fold a call to builtin isascii with argument ARG. */
7473 static tree
7474 fold_builtin_isascii (location_t loc, tree arg)
7476 if (!validate_arg (arg, INTEGER_TYPE))
7477 return NULL_TREE;
7478 else
7480 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7481 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7482 build_int_cst (integer_type_node,
7483 ~ (unsigned HOST_WIDE_INT) 0x7f));
7484 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7485 arg, integer_zero_node);
7489 /* Fold a call to builtin toascii with argument ARG. */
7491 static tree
7492 fold_builtin_toascii (location_t loc, tree arg)
7494 if (!validate_arg (arg, INTEGER_TYPE))
7495 return NULL_TREE;
7497 /* Transform toascii(c) -> (c & 0x7f). */
7498 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7499 build_int_cst (integer_type_node, 0x7f));
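/* Worked example (editor's note, not part of the original source): for
   c = 200 the two folds above give

     isascii (200)  ->  ((200 & ~0x7f) == 0)  ->  (128 == 0)  ->  0
     toascii (200)  ->  (200 & 0x7f)          ->  72

   i.e. plain bit operations instead of library calls.  */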
7502 /* Fold a call to builtin isdigit with argument ARG. */
7504 static tree
7505 fold_builtin_isdigit (location_t loc, tree arg)
7507 if (!validate_arg (arg, INTEGER_TYPE))
7508 return NULL_TREE;
7509 else
7511 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7512 /* According to the C standard, isdigit is unaffected by locale.
7513 However, it definitely is affected by the target character set. */
7514 unsigned HOST_WIDE_INT target_digit0
7515 = lang_hooks.to_target_charset ('0');
7517 if (target_digit0 == 0)
7518 return NULL_TREE;
7520 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7521 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7522 build_int_cst (unsigned_type_node, target_digit0));
7523 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7524 build_int_cst (unsigned_type_node, 9));
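/* Worked example (editor's note, not part of the original source):
   assuming an ASCII target character set, '0' == 48, so

     isdigit ('7')  ->  (unsigned) 55 - 48 == 7   ->  7 <= 9   ->  1
     isdigit ('/')  ->  (unsigned) 47 - 48 wraps  ->  huge > 9 ->  0

   the unsigned subtraction turns the two-sided range check into a
   single comparison.  */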
7528 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7530 static tree
7531 fold_builtin_fabs (location_t loc, tree arg, tree type)
7533 if (!validate_arg (arg, REAL_TYPE))
7534 return NULL_TREE;
7536 arg = fold_convert_loc (loc, type, arg);
7537 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7540 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7542 static tree
7543 fold_builtin_abs (location_t loc, tree arg, tree type)
7545 if (!validate_arg (arg, INTEGER_TYPE))
7546 return NULL_TREE;
7548 arg = fold_convert_loc (loc, type, arg);
7549 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7552 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7554 static tree
7555 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7557 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7558 if (validate_arg (arg0, REAL_TYPE)
7559 && validate_arg (arg1, REAL_TYPE)
7560 && validate_arg (arg2, REAL_TYPE)
7561 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7562 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7564 return NULL_TREE;
7567 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7569 static tree
7570 fold_builtin_carg (location_t loc, tree arg, tree type)
7572 if (validate_arg (arg, COMPLEX_TYPE)
7573 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7575 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7577 if (atan2_fn)
7579 tree new_arg = builtin_save_expr (arg);
7580 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7581 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7582 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7586 return NULL_TREE;
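/* Worked example (editor's note, not part of the original source): carg
   returns the phase angle of a complex value, so the fold above builds

     carg (z)  ->  atan2 (__imag__ z, __real__ z)

   with z wrapped via builtin_save_expr so it is evaluated only once.  */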
7589 /* Fold a call to builtin frexp; we can assume the base is 2. */
7591 static tree
7592 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7594 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7595 return NULL_TREE;
7597 STRIP_NOPS (arg0);
7599 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7600 return NULL_TREE;
7602 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7604 /* Proceed if a valid pointer type was passed in. */
7605 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7607 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7608 tree frac, exp;
7610 switch (value->cl)
7612 case rvc_zero:
7613 /* For +-0, return (*exp = 0, +-0). */
7614 exp = integer_zero_node;
7615 frac = arg0;
7616 break;
7617 case rvc_nan:
7618 case rvc_inf:
7619 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7620 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7621 case rvc_normal:
7623 /* Since the frexp function always expects base 2, and in
7624 GCC normalized significands are already in the range
7625 [0.5, 1.0), we have exactly what frexp wants. */
7626 REAL_VALUE_TYPE frac_rvt = *value;
7627 SET_REAL_EXP (&frac_rvt, 0);
7628 frac = build_real (rettype, frac_rvt);
7629 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7631 break;
7632 default:
7633 gcc_unreachable ();
7636 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7637 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7638 TREE_SIDE_EFFECTS (arg1) = 1;
7639 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7642 return NULL_TREE;
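/* Worked example (editor's note, not part of the original source): since
   GCC keeps normalized significands in [0.5, 1.0), constant folding
   gives, e.g.

     frexp (8.0, &e)  ->  (e = 4, 0.5)    since 8.0 == 0.5 * 2**4
     frexp (0.0, &e)  ->  (e = 0, 0.0)  */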
7645 /* Fold a call to builtin modf. */
7647 static tree
7648 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7650 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7651 return NULL_TREE;
7653 STRIP_NOPS (arg0);
7655 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7656 return NULL_TREE;
7658 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7660 /* Proceed if a valid pointer type was passed in. */
7661 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7663 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7664 REAL_VALUE_TYPE trunc, frac;
7666 switch (value->cl)
7668 case rvc_nan:
7669 case rvc_zero:
7670 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7671 trunc = frac = *value;
7672 break;
7673 case rvc_inf:
7674 /* For +-Inf, return (*arg1 = arg0, +-0). */
7675 frac = dconst0;
7676 frac.sign = value->sign;
7677 trunc = *value;
7678 break;
7679 case rvc_normal:
7680 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7681 real_trunc (&trunc, VOIDmode, value);
7682 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7683 /* If the original number was negative and already
7684 integral, then the fractional part is -0.0. */
7685 if (value->sign && frac.cl == rvc_zero)
7686 frac.sign = value->sign;
7687 break;
7690 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7691 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7692 build_real (rettype, trunc));
7693 TREE_SIDE_EFFECTS (arg1) = 1;
7694 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7695 build_real (rettype, frac));
7698 return NULL_TREE;
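/* Worked example (editor's note, not part of the original source):

     modf (2.5, &i)   ->  (i = 2.0, 0.5)
     modf (-2.0, &i)  ->  (i = -2.0, -0.0)

   the second case illustrating that a negative, already-integral input
   yields a -0.0 fractional part.  */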
7701 /* Given a location LOC, an interclass builtin function decl FNDECL
7702 and its single argument ARG, return a folded expression computing
7703 the same, or NULL_TREE if we either couldn't or didn't want to fold
7704 (the latter happens if there's an RTL instruction available). */
7706 static tree
7707 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7709 machine_mode mode;
7711 if (!validate_arg (arg, REAL_TYPE))
7712 return NULL_TREE;
7714 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7715 return NULL_TREE;
7717 mode = TYPE_MODE (TREE_TYPE (arg));
7719 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7721 /* If there is no optab, try generic code. */
7722 switch (DECL_FUNCTION_CODE (fndecl))
7724 tree result;
7726 CASE_FLT_FN (BUILT_IN_ISINF):
7728 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
7729 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7730 tree type = TREE_TYPE (arg);
7731 REAL_VALUE_TYPE r;
7732 char buf[128];
7734 if (is_ibm_extended)
7736 /* NaN and Inf are encoded in the high-order double value
7737 only. The low-order value is not significant. */
7738 type = double_type_node;
7739 mode = DFmode;
7740 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7742 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7743 real_from_string (&r, buf);
7744 result = build_call_expr (isgr_fn, 2,
7745 fold_build1_loc (loc, ABS_EXPR, type, arg),
7746 build_real (type, r));
7747 return result;
7749 CASE_FLT_FN (BUILT_IN_FINITE):
7750 case BUILT_IN_ISFINITE:
7752 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7753 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7754 tree type = TREE_TYPE (arg);
7755 REAL_VALUE_TYPE r;
7756 char buf[128];
7758 if (is_ibm_extended)
7760 /* NaN and Inf are encoded in the high-order double value
7761 only. The low-order value is not significant. */
7762 type = double_type_node;
7763 mode = DFmode;
7764 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7766 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7767 real_from_string (&r, buf);
7768 result = build_call_expr (isle_fn, 2,
7769 fold_build1_loc (loc, ABS_EXPR, type, arg),
7770 build_real (type, r));
7771 /*result = fold_build2_loc (loc, UNGT_EXPR,
7772 TREE_TYPE (TREE_TYPE (fndecl)),
7773 fold_build1_loc (loc, ABS_EXPR, type, arg),
7774 build_real (type, r));
7775 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7776 TREE_TYPE (TREE_TYPE (fndecl)),
7777 result);*/
7778 return result;
7780 case BUILT_IN_ISNORMAL:
7782 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7783 islessequal(fabs(x),DBL_MAX). */
7784 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7785 tree type = TREE_TYPE (arg);
7786 tree orig_arg, max_exp, min_exp;
7787 machine_mode orig_mode = mode;
7788 REAL_VALUE_TYPE rmax, rmin;
7789 char buf[128];
7791 orig_arg = arg = builtin_save_expr (arg);
7792 if (is_ibm_extended)
7794 /* Use double to test the normal range of IBM extended
7795 precision. Emin for IBM extended precision is
7796 different to emin for IEEE double, being 53 higher
7797 since the low double exponent is at least 53 lower
7798 than the high double exponent. */
7799 type = double_type_node;
7800 mode = DFmode;
7801 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7803 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7805 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7806 real_from_string (&rmax, buf);
7807 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7808 real_from_string (&rmin, buf);
7809 max_exp = build_real (type, rmax);
7810 min_exp = build_real (type, rmin);
7812 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7813 if (is_ibm_extended)
7815 /* Testing the high end of the range is done just using
7816 the high double, using the same test as isfinite().
7817 For the subnormal end of the range we first test the
7818 high double, then if its magnitude is equal to the
7819 limit of 0x1p-969, we test whether the low double is
7820 non-zero and opposite sign to the high double. */
7821 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7822 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7823 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7824 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7825 arg, min_exp);
7826 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7827 complex_double_type_node, orig_arg);
7828 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7829 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7830 tree zero = build_real (type, dconst0);
7831 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7832 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7833 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7834 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7835 fold_build3 (COND_EXPR,
7836 integer_type_node,
7837 hilt, logt, lolt));
7838 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7839 eq_min, ok_lo);
7840 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7841 gt_min, eq_min);
7843 else
7845 tree const isge_fn
7846 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7847 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7849 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7850 max_exp, min_exp);
7851 return result;
7853 default:
7854 break;
7857 return NULL_TREE;
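/* Illustrative sketch (editor's note, not part of the original source):
   on a target with no dedicated instruction, the generic expansions
   above amount to

     isinf (x)     ->  isgreater (fabs (x), MAX)
     isfinite (x)  ->  islessequal (fabs (x), MAX)

   where MAX is the largest finite value for the mode, obtained from
   get_max_float; the unordered comparisons give the right answer for
   NaNs without raising spurious invalid-operand exceptions.  */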
7860 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7861 ARG is the argument for the call. */
7863 static tree
7864 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7866 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7868 if (!validate_arg (arg, REAL_TYPE))
7869 return NULL_TREE;
7871 switch (builtin_index)
7873 case BUILT_IN_ISINF:
7874 if (!HONOR_INFINITIES (arg))
7875 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7877 return NULL_TREE;
7879 case BUILT_IN_ISINF_SIGN:
7881 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7882 /* In a boolean context, GCC will fold the inner COND_EXPR to
7883 1. So e.g. "if (isinf_sign(x))" would be folded to just
7884 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7885 tree signbit_fn = mathfn_built_in_1
7886 (TREE_TYPE (arg), CFN_BUILT_IN_SIGNBIT, 0);
7887 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7888 tree tmp = NULL_TREE;
7890 arg = builtin_save_expr (arg);
7892 if (signbit_fn && isinf_fn)
7894 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7895 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7897 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7898 signbit_call, integer_zero_node);
7899 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7900 isinf_call, integer_zero_node);
7902 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7903 integer_minus_one_node, integer_one_node);
7904 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7905 isinf_call, tmp,
7906 integer_zero_node);
7909 return tmp;
7912 case BUILT_IN_ISFINITE:
7913 if (!HONOR_NANS (arg)
7914 && !HONOR_INFINITIES (arg))
7915 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7917 return NULL_TREE;
7919 case BUILT_IN_ISNAN:
7920 if (!HONOR_NANS (arg))
7921 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7924 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7925 if (is_ibm_extended)
7927 /* NaN and Inf are encoded in the high-order double value
7928 only. The low-order value is not significant. */
7929 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7932 arg = builtin_save_expr (arg);
7933 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7935 default:
7936 gcc_unreachable ();
7940 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7941 This builtin will generate code to return the appropriate floating
7942 point classification depending on the value of the floating point
7943 number passed in. The possible return values must be supplied as
7944 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7945 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7946 one floating point argument which is "type generic". */
7948 static tree
7949 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7951 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7952 arg, type, res, tmp;
7953 machine_mode mode;
7954 REAL_VALUE_TYPE r;
7955 char buf[128];
7957 /* Verify the required arguments in the original call. */
7958 if (nargs != 6
7959 || !validate_arg (args[0], INTEGER_TYPE)
7960 || !validate_arg (args[1], INTEGER_TYPE)
7961 || !validate_arg (args[2], INTEGER_TYPE)
7962 || !validate_arg (args[3], INTEGER_TYPE)
7963 || !validate_arg (args[4], INTEGER_TYPE)
7964 || !validate_arg (args[5], REAL_TYPE))
7965 return NULL_TREE;
7967 fp_nan = args[0];
7968 fp_infinite = args[1];
7969 fp_normal = args[2];
7970 fp_subnormal = args[3];
7971 fp_zero = args[4];
7972 arg = args[5];
7973 type = TREE_TYPE (arg);
7974 mode = TYPE_MODE (type);
7975 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7977 /* fpclassify(x) ->
7978 isnan(x) ? FP_NAN :
7979 (fabs(x) == Inf ? FP_INFINITE :
7980 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7981 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7983 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7984 build_real (type, dconst0));
7985 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7986 tmp, fp_zero, fp_subnormal);
7988 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7989 real_from_string (&r, buf);
7990 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7991 arg, build_real (type, r));
7992 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7994 if (HONOR_INFINITIES (mode))
7996 real_inf (&r);
7997 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7998 build_real (type, r));
7999 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8000 fp_infinite, res);
8003 if (HONOR_NANS (mode))
8005 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8006 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8009 return res;
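/* Worked example (editor's note, not part of the original source): the
   nested COND_EXPRs built above amount to

     fpclassify (x) ==
       !ordered (x, x)          ? FP_NAN
     : fabs (x) == Inf          ? FP_INFINITE
     : fabs (x) >= MIN_NORMAL   ? FP_NORMAL
     : fabs (x) == 0            ? FP_ZERO
     :                            FP_SUBNORMAL

   where MIN_NORMAL is 0x1p(emin-1) for the argument's mode, and the NaN
   and infinity legs are dropped when the mode honors neither.  */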
8012 /* Fold a call to an unordered comparison function such as
8013 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8014 being called and ARG0 and ARG1 are the arguments for the call.
8015 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8016 the opposite of the desired result. UNORDERED_CODE is used
8017 for modes that can hold NaNs and ORDERED_CODE is used for
8018 the rest. */
8020 static tree
8021 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8022 enum tree_code unordered_code,
8023 enum tree_code ordered_code)
8025 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8026 enum tree_code code;
8027 tree type0, type1;
8028 enum tree_code code0, code1;
8029 tree cmp_type = NULL_TREE;
8031 type0 = TREE_TYPE (arg0);
8032 type1 = TREE_TYPE (arg1);
8034 code0 = TREE_CODE (type0);
8035 code1 = TREE_CODE (type1);
8037 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8038 /* Choose the wider of two real types. */
8039 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8040 ? type0 : type1;
8041 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8042 cmp_type = type0;
8043 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8044 cmp_type = type1;
8046 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8047 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8049 if (unordered_code == UNORDERED_EXPR)
8051 if (!HONOR_NANS (arg0))
8052 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8053 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8056 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8057 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8058 fold_build2_loc (loc, code, type, arg0, arg1));
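/* Worked example (editor's note, not part of the original source): when
   NaNs are honored, the fold above yields e.g.

     isgreater (x, y)  ->  !UNLE_EXPR (x, y)

   which is true exactly when x > y yet never raises an invalid
   exception on quiet NaNs; with -ffinite-math-only it degenerates to
   the plain !(x <= y).  */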
8061 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8062 arithmetic if it can never overflow, or into internal functions that
8063 return both the result of the arithmetic and an overflowed boolean flag in
8064 a complex integer result, or some other check for overflow.
8065 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8066 checking part of that. */
8068 static tree
8069 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8070 tree arg0, tree arg1, tree arg2)
8072 enum internal_fn ifn = IFN_LAST;
8073 /* The code of the expression corresponding to the type-generic
8074 built-in, or ERROR_MARK for the type-specific ones. */
8075 enum tree_code opcode = ERROR_MARK;
8076 bool ovf_only = false;
8078 switch (fcode)
8080 case BUILT_IN_ADD_OVERFLOW_P:
8081 ovf_only = true;
8082 /* FALLTHRU */
8083 case BUILT_IN_ADD_OVERFLOW:
8084 opcode = PLUS_EXPR;
8085 /* FALLTHRU */
8086 case BUILT_IN_SADD_OVERFLOW:
8087 case BUILT_IN_SADDL_OVERFLOW:
8088 case BUILT_IN_SADDLL_OVERFLOW:
8089 case BUILT_IN_UADD_OVERFLOW:
8090 case BUILT_IN_UADDL_OVERFLOW:
8091 case BUILT_IN_UADDLL_OVERFLOW:
8092 ifn = IFN_ADD_OVERFLOW;
8093 break;
8094 case BUILT_IN_SUB_OVERFLOW_P:
8095 ovf_only = true;
8096 /* FALLTHRU */
8097 case BUILT_IN_SUB_OVERFLOW:
8098 opcode = MINUS_EXPR;
8099 /* FALLTHRU */
8100 case BUILT_IN_SSUB_OVERFLOW:
8101 case BUILT_IN_SSUBL_OVERFLOW:
8102 case BUILT_IN_SSUBLL_OVERFLOW:
8103 case BUILT_IN_USUB_OVERFLOW:
8104 case BUILT_IN_USUBL_OVERFLOW:
8105 case BUILT_IN_USUBLL_OVERFLOW:
8106 ifn = IFN_SUB_OVERFLOW;
8107 break;
8108 case BUILT_IN_MUL_OVERFLOW_P:
8109 ovf_only = true;
8110 /* FALLTHRU */
8111 case BUILT_IN_MUL_OVERFLOW:
8112 opcode = MULT_EXPR;
8113 /* FALLTHRU */
8114 case BUILT_IN_SMUL_OVERFLOW:
8115 case BUILT_IN_SMULL_OVERFLOW:
8116 case BUILT_IN_SMULLL_OVERFLOW:
8117 case BUILT_IN_UMUL_OVERFLOW:
8118 case BUILT_IN_UMULL_OVERFLOW:
8119 case BUILT_IN_UMULLL_OVERFLOW:
8120 ifn = IFN_MUL_OVERFLOW;
8121 break;
8122 default:
8123 gcc_unreachable ();
8126 /* For the "generic" overloads, the first two arguments can have different
8127 types and the last argument determines the target type to use to check
8128 for overflow. The arguments of the other overloads all have the same
8129 type. */
8130 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8132 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8133 arguments are constant, attempt to fold the built-in call into a constant
8134 expression indicating whether or not it detected an overflow. */
8135 if (ovf_only
8136 && TREE_CODE (arg0) == INTEGER_CST
8137 && TREE_CODE (arg1) == INTEGER_CST)
8138 /* Perform the computation in the target type and check for overflow. */
8139 return omit_one_operand_loc (loc, boolean_type_node,
8140 arith_overflowed_p (opcode, type, arg0, arg1)
8141 ? boolean_true_node : boolean_false_node,
8142 arg2);
8144 tree ctype = build_complex_type (type);
8145 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8146 2, arg0, arg1);
8147 tree tgt = save_expr (call);
8148 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8149 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8150 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8152 if (ovf_only)
8153 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8155 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8156 tree store
8157 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8158 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
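/* Illustrative sketch (editor's note, not part of the original source):
   for the type-generic form, the code above produces roughly

     t = IFN_ADD_OVERFLOW (a, b);       (a complex-int SAVE_EXPR)
     *res = REALPART_EXPR (t);
     (bool) IMAGPART_EXPR (t);          (the value of the builtin)

   while __builtin_add_overflow_p keeps only the boolean IMAGPART and,
   for constant operands, folds straight to true/false via
   arith_overflowed_p.  */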
8161 /* Fold a call to __builtin_FILE to a constant string. */
8163 static inline tree
8164 fold_builtin_FILE (location_t loc)
8166 if (const char *fname = LOCATION_FILE (loc))
8167 return build_string_literal (strlen (fname) + 1, fname);
8169 return build_string_literal (1, "");
8172 /* Fold a call to __builtin_FUNCTION to a constant string. */
8174 static inline tree
8175 fold_builtin_FUNCTION ()
8177 if (current_function_decl)
8179 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8180 return build_string_literal (strlen (name) + 1, name);
8183 return build_string_literal (1, "");
8186 /* Fold a call to __builtin_LINE to an integer constant. */
8188 static inline tree
8189 fold_builtin_LINE (location_t loc, tree type)
8191 return build_int_cst (type, LOCATION_LINE (loc));
8194 /* Fold a call to built-in function FNDECL with 0 arguments.
8195 This function returns NULL_TREE if no simplification was possible. */
8197 static tree
8198 fold_builtin_0 (location_t loc, tree fndecl)
8200 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8201 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8202 switch (fcode)
8204 case BUILT_IN_FILE:
8205 return fold_builtin_FILE (loc);
8207 case BUILT_IN_FUNCTION:
8208 return fold_builtin_FUNCTION ();
8210 case BUILT_IN_LINE:
8211 return fold_builtin_LINE (loc, type);
8213 CASE_FLT_FN (BUILT_IN_INF):
8214 case BUILT_IN_INFD32:
8215 case BUILT_IN_INFD64:
8216 case BUILT_IN_INFD128:
8217 return fold_builtin_inf (loc, type, true);
8219 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8220 return fold_builtin_inf (loc, type, false);
8222 case BUILT_IN_CLASSIFY_TYPE:
8223 return fold_builtin_classify_type (NULL_TREE);
8225 default:
8226 break;
8228 return NULL_TREE;
8231 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8232 This function returns NULL_TREE if no simplification was possible. */
8234 static tree
8235 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8238 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8240 if (TREE_CODE (arg0) == ERROR_MARK)
8241 return NULL_TREE;
8243 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8244 return ret;
8246 switch (fcode)
8248 case BUILT_IN_CONSTANT_P:
8250 tree val = fold_builtin_constant_p (arg0);
8252 /* Gimplification will pull the CALL_EXPR for the builtin out of
8253 an if condition. When not optimizing, we'll not CSE it back.
8254 To avoid regressions such as link errors, return false now. */
8255 if (!val && !optimize)
8256 val = integer_zero_node;
8258 return val;
8261 case BUILT_IN_CLASSIFY_TYPE:
8262 return fold_builtin_classify_type (arg0);
8264 case BUILT_IN_STRLEN:
8265 return fold_builtin_strlen (loc, type, arg0);
8267 CASE_FLT_FN (BUILT_IN_FABS):
8268 case BUILT_IN_FABSD32:
8269 case BUILT_IN_FABSD64:
8270 case BUILT_IN_FABSD128:
8271 return fold_builtin_fabs (loc, arg0, type);
8273 case BUILT_IN_ABS:
8274 case BUILT_IN_LABS:
8275 case BUILT_IN_LLABS:
8276 case BUILT_IN_IMAXABS:
8277 return fold_builtin_abs (loc, arg0, type);
8279 CASE_FLT_FN (BUILT_IN_CONJ):
8280 if (validate_arg (arg0, COMPLEX_TYPE)
8281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8282 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8283 break;
8285 CASE_FLT_FN (BUILT_IN_CREAL):
8286 if (validate_arg (arg0, COMPLEX_TYPE)
8287 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8288 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8289 break;
8291 CASE_FLT_FN (BUILT_IN_CIMAG):
8292 if (validate_arg (arg0, COMPLEX_TYPE)
8293 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8294 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8295 break;
8297 CASE_FLT_FN (BUILT_IN_CARG):
8298 return fold_builtin_carg (loc, arg0, type);
8300 case BUILT_IN_ISASCII:
8301 return fold_builtin_isascii (loc, arg0);
8303 case BUILT_IN_TOASCII:
8304 return fold_builtin_toascii (loc, arg0);
8306 case BUILT_IN_ISDIGIT:
8307 return fold_builtin_isdigit (loc, arg0);
8309 CASE_FLT_FN (BUILT_IN_FINITE):
8310 case BUILT_IN_FINITED32:
8311 case BUILT_IN_FINITED64:
8312 case BUILT_IN_FINITED128:
8313 case BUILT_IN_ISFINITE:
8315 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8316 if (ret)
8317 return ret;
8318 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8321 CASE_FLT_FN (BUILT_IN_ISINF):
8322 case BUILT_IN_ISINFD32:
8323 case BUILT_IN_ISINFD64:
8324 case BUILT_IN_ISINFD128:
8326 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8327 if (ret)
8328 return ret;
8329 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8332 case BUILT_IN_ISNORMAL:
8333 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8335 case BUILT_IN_ISINF_SIGN:
8336 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8338 CASE_FLT_FN (BUILT_IN_ISNAN):
8339 case BUILT_IN_ISNAND32:
8340 case BUILT_IN_ISNAND64:
8341 case BUILT_IN_ISNAND128:
8342 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8344 case BUILT_IN_FREE:
8345 if (integer_zerop (arg0))
8346 return build_empty_stmt (loc);
8347 break;
8349 default:
8350 break;
8353 return NULL_TREE;
8357 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8358 This function returns NULL_TREE if no simplification was possible. */
8360 static tree
8361 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8363 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8364 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8366 if (TREE_CODE (arg0) == ERROR_MARK
8367 || TREE_CODE (arg1) == ERROR_MARK)
8368 return NULL_TREE;
8370 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8371 return ret;
8373 switch (fcode)
8375 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8376 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8377 if (validate_arg (arg0, REAL_TYPE)
8378 && validate_arg (arg1, POINTER_TYPE))
8379 return do_mpfr_lgamma_r (arg0, arg1, type);
8380 break;
8382 CASE_FLT_FN (BUILT_IN_FREXP):
8383 return fold_builtin_frexp (loc, arg0, arg1, type);
8385 CASE_FLT_FN (BUILT_IN_MODF):
8386 return fold_builtin_modf (loc, arg0, arg1, type);
8388 case BUILT_IN_STRSTR:
8389 return fold_builtin_strstr (loc, arg0, arg1, type);
8391 case BUILT_IN_STRSPN:
8392 return fold_builtin_strspn (loc, arg0, arg1);
8394 case BUILT_IN_STRCSPN:
8395 return fold_builtin_strcspn (loc, arg0, arg1);
8397 case BUILT_IN_STRCHR:
8398 case BUILT_IN_INDEX:
8399 return fold_builtin_strchr (loc, arg0, arg1, type);
8401 case BUILT_IN_STRRCHR:
8402 case BUILT_IN_RINDEX:
8403 return fold_builtin_strrchr (loc, arg0, arg1, type);
8405 case BUILT_IN_STRCMP:
8406 return fold_builtin_strcmp (loc, arg0, arg1);
8408 case BUILT_IN_STRPBRK:
8409 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8411 case BUILT_IN_EXPECT:
8412 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8414 case BUILT_IN_ISGREATER:
8415 return fold_builtin_unordered_cmp (loc, fndecl,
8416 arg0, arg1, UNLE_EXPR, LE_EXPR);
8417 case BUILT_IN_ISGREATEREQUAL:
8418 return fold_builtin_unordered_cmp (loc, fndecl,
8419 arg0, arg1, UNLT_EXPR, LT_EXPR);
8420 case BUILT_IN_ISLESS:
8421 return fold_builtin_unordered_cmp (loc, fndecl,
8422 arg0, arg1, UNGE_EXPR, GE_EXPR);
8423 case BUILT_IN_ISLESSEQUAL:
8424 return fold_builtin_unordered_cmp (loc, fndecl,
8425 arg0, arg1, UNGT_EXPR, GT_EXPR);
8426 case BUILT_IN_ISLESSGREATER:
8427 return fold_builtin_unordered_cmp (loc, fndecl,
8428 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8429 case BUILT_IN_ISUNORDERED:
8430 return fold_builtin_unordered_cmp (loc, fndecl,
8431 arg0, arg1, UNORDERED_EXPR,
8432 NOP_EXPR);
8434 /* We do the folding for va_start in the expander. */
8435 case BUILT_IN_VA_START:
8436 break;
8438 case BUILT_IN_OBJECT_SIZE:
8439 return fold_builtin_object_size (arg0, arg1);
8441 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8442 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8444 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8445 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8447 default:
8448 break;
8450 return NULL_TREE;
8453 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8454 and ARG2.
8455 This function returns NULL_TREE if no simplification was possible. */
8457 static tree
8458 fold_builtin_3 (location_t loc, tree fndecl,
8459 tree arg0, tree arg1, tree arg2)
8461 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8462 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8464 if (TREE_CODE (arg0) == ERROR_MARK
8465 || TREE_CODE (arg1) == ERROR_MARK
8466 || TREE_CODE (arg2) == ERROR_MARK)
8467 return NULL_TREE;
8469 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8470 arg0, arg1, arg2))
8471 return ret;
8473 switch (fcode)
8476 CASE_FLT_FN (BUILT_IN_SINCOS):
8477 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8479 CASE_FLT_FN (BUILT_IN_FMA):
8480 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8482 CASE_FLT_FN (BUILT_IN_REMQUO):
8483 if (validate_arg (arg0, REAL_TYPE)
8484 && validate_arg (arg1, REAL_TYPE)
8485 && validate_arg (arg2, POINTER_TYPE))
8486 return do_mpfr_remquo (arg0, arg1, arg2);
8487 break;
8489 case BUILT_IN_STRNCMP:
8490 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8492 case BUILT_IN_MEMCHR:
8493 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8495 case BUILT_IN_BCMP:
8496 case BUILT_IN_MEMCMP:
8497 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8499 case BUILT_IN_EXPECT:
8500 return fold_builtin_expect (loc, arg0, arg1, arg2);
8502 case BUILT_IN_ADD_OVERFLOW:
8503 case BUILT_IN_SUB_OVERFLOW:
8504 case BUILT_IN_MUL_OVERFLOW:
8505 case BUILT_IN_ADD_OVERFLOW_P:
8506 case BUILT_IN_SUB_OVERFLOW_P:
8507 case BUILT_IN_MUL_OVERFLOW_P:
8508 case BUILT_IN_SADD_OVERFLOW:
8509 case BUILT_IN_SADDL_OVERFLOW:
8510 case BUILT_IN_SADDLL_OVERFLOW:
8511 case BUILT_IN_SSUB_OVERFLOW:
8512 case BUILT_IN_SSUBL_OVERFLOW:
8513 case BUILT_IN_SSUBLL_OVERFLOW:
8514 case BUILT_IN_SMUL_OVERFLOW:
8515 case BUILT_IN_SMULL_OVERFLOW:
8516 case BUILT_IN_SMULLL_OVERFLOW:
8517 case BUILT_IN_UADD_OVERFLOW:
8518 case BUILT_IN_UADDL_OVERFLOW:
8519 case BUILT_IN_UADDLL_OVERFLOW:
8520 case BUILT_IN_USUB_OVERFLOW:
8521 case BUILT_IN_USUBL_OVERFLOW:
8522 case BUILT_IN_USUBLL_OVERFLOW:
8523 case BUILT_IN_UMUL_OVERFLOW:
8524 case BUILT_IN_UMULL_OVERFLOW:
8525 case BUILT_IN_UMULLL_OVERFLOW:
8526 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8528 default:
8529 break;
8531 return NULL_TREE;
8534 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8535 arguments. IGNORE is true if the result of the
8536 function call is ignored. This function returns NULL_TREE if no
8537 simplification was possible. */
8539 tree
8540 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8542 tree ret = NULL_TREE;
8544 switch (nargs)
8546 case 0:
8547 ret = fold_builtin_0 (loc, fndecl);
8548 break;
8549 case 1:
8550 ret = fold_builtin_1 (loc, fndecl, args[0]);
8551 break;
8552 case 2:
8553 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8554 break;
8555 case 3:
8556 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8557 break;
8558 default:
8559 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8560 break;
8562 if (ret)
8564 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8565 SET_EXPR_LOCATION (ret, loc);
8566 TREE_NO_WARNING (ret) = 1;
8567 return ret;
8569 return NULL_TREE;
8572 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8573 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8574 of arguments in ARGS to be omitted. OLDNARGS is the number of
8575 elements in ARGS. */
8577 static tree
8578 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8579 int skip, tree fndecl, int n, va_list newargs)
8581 int nargs = oldnargs - skip + n;
8582 tree *buffer;
8584 if (n > 0)
8586 int i, j;
8588 buffer = XALLOCAVEC (tree, nargs);
8589 for (i = 0; i < n; i++)
8590 buffer[i] = va_arg (newargs, tree);
8591 for (j = skip; j < oldnargs; j++, i++)
8592 buffer[i] = args[j];
8594 else
8595 buffer = args + skip;
8597 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8600 /* Return true if FNDECL shouldn't be folded right now.
8601 If a built-in function has an inline attribute always_inline
8602 wrapper, defer folding it after always_inline functions have
8603 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8604 might not be performed. */
8606 bool
8607 avoid_folding_inline_builtin (tree fndecl)
8609 return (DECL_DECLARED_INLINE_P (fndecl)
8610 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8611 && cfun
8612 && !cfun->always_inline_functions_inlined
8613 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8616 /* A wrapper function for builtin folding that prevents warnings for
8617 "statement without effect" and the like, caused by removing the
8618 call node earlier than the warning is generated. */
8620 tree
8621 fold_call_expr (location_t loc, tree exp, bool ignore)
8623 tree ret = NULL_TREE;
8624 tree fndecl = get_callee_fndecl (exp);
8625 if (fndecl
8626 && TREE_CODE (fndecl) == FUNCTION_DECL
8627 && DECL_BUILT_IN (fndecl)
8628 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8629 yet. Defer folding until we see all the arguments
8630 (after inlining). */
8631 && !CALL_EXPR_VA_ARG_PACK (exp))
8633 int nargs = call_expr_nargs (exp);
8635 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8636 instead the last argument is __builtin_va_arg_pack (). Defer folding
8637 even in that case, until arguments are finalized. */
8638 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8640 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8641 if (fndecl2
8642 && TREE_CODE (fndecl2) == FUNCTION_DECL
8643 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8644 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8645 return NULL_TREE;
8648 if (avoid_folding_inline_builtin (fndecl))
8649 return NULL_TREE;
8651 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8652 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8653 CALL_EXPR_ARGP (exp), ignore);
8654 else
8656 tree *args = CALL_EXPR_ARGP (exp);
8657 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8658 if (ret)
8659 return ret;
8662 return NULL_TREE;
8665 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8666 N arguments are passed in the array ARGARRAY. Return a folded
8667 expression or NULL_TREE if no simplification was possible. */
8669 tree
8670 fold_builtin_call_array (location_t loc, tree,
8671 tree fn,
8672 int n,
8673 tree *argarray)
8675 if (TREE_CODE (fn) != ADDR_EXPR)
8676 return NULL_TREE;
8678 tree fndecl = TREE_OPERAND (fn, 0);
8679 if (TREE_CODE (fndecl) == FUNCTION_DECL
8680 && DECL_BUILT_IN (fndecl))
8682 /* If last argument is __builtin_va_arg_pack (), arguments to this
8683 function are not finalized yet. Defer folding until they are. */
8684 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8686 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8687 if (fndecl2
8688 && TREE_CODE (fndecl2) == FUNCTION_DECL
8689 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8690 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8691 return NULL_TREE;
8693 if (avoid_folding_inline_builtin (fndecl))
8694 return NULL_TREE;
8695 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8696 return targetm.fold_builtin (fndecl, n, argarray, false);
8697 else
8698 return fold_builtin_n (loc, fndecl, argarray, n, false);
8701 return NULL_TREE;
8704 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8705 along with N new arguments specified as the "..." parameters. SKIP
8706 is the number of arguments in EXP to be omitted. This function is used
8707 to do varargs-to-varargs transformations. */
8709 static tree
8710 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8712 va_list ap;
8713 tree t;
8715 va_start (ap, n);
8716 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8717 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8718 va_end (ap);
8720 return t;
8723 /* Validate a single argument ARG against a tree code CODE representing
8724 a type. */
8726 static bool
8727 validate_arg (const_tree arg, enum tree_code code)
8729 if (!arg)
8730 return false;
8731 else if (code == POINTER_TYPE)
8732 return POINTER_TYPE_P (TREE_TYPE (arg));
8733 else if (code == INTEGER_TYPE)
8734 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8735 return code == TREE_CODE (TREE_TYPE (arg));
8738 /* This function validates the types of a function call argument list
8739 against a specified list of tree_codes. If the last specifier is a 0,
8740 that represents an ellipsis; otherwise the last specifier must be a
8741 VOID_TYPE.
8743 This is the GIMPLE version of validate_arglist. Eventually we want to
8744 completely convert builtins.c to work from GIMPLEs and the tree based
8745 validate_arglist will then be removed. */
8747 bool
8748 validate_gimple_arglist (const gcall *call, ...)
8750 enum tree_code code;
8751 bool res = 0;
8752 va_list ap;
8753 const_tree arg;
8754 size_t i;
8756 va_start (ap, call);
8757 i = 0;
8759 do
8760 {
8761 code = (enum tree_code) va_arg (ap, int);
8762 switch (code)
8764 case 0:
8765 /* This signifies an ellipsis; any further arguments are all ok. */
8766 res = true;
8767 goto end;
8768 case VOID_TYPE:
8769 /* This signifies an endlink; if no arguments remain, return
8770 true, otherwise return false. */
8771 res = (i == gimple_call_num_args (call));
8772 goto end;
8773 default:
8774 /* If no parameters remain or the parameter's code does not
8775 match the specified code, return false. Otherwise continue
8776 checking any remaining arguments. */
8777 arg = gimple_call_arg (call, i++);
8778 if (!validate_arg (arg, code))
8779 goto end;
8780 break;
8782 }
8783 while (1);
8785 /* We need gotos here since we can only have one VA_CLOSE in a
8786 function. */
8787 end: ;
8788 va_end (ap);
8790 return res;
8793 /* Default target-specific builtin expander that does nothing. */
8795 rtx
8796 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8797 rtx target ATTRIBUTE_UNUSED,
8798 rtx subtarget ATTRIBUTE_UNUSED,
8799 machine_mode mode ATTRIBUTE_UNUSED,
8800 int ignore ATTRIBUTE_UNUSED)
8802 return NULL_RTX;
8805 /* Returns true if EXP represents data that would potentially reside
8806 in a readonly section. */
8808 bool
8809 readonly_data_expr (tree exp)
8811 STRIP_NOPS (exp);
8813 if (TREE_CODE (exp) != ADDR_EXPR)
8814 return false;
8816 exp = get_base_address (TREE_OPERAND (exp, 0));
8817 if (!exp)
8818 return false;
8820 /* Make sure we call decl_readonly_section only for trees it
8821 can handle (since it returns true for everything it doesn't
8822 understand). */
8823 if (TREE_CODE (exp) == STRING_CST
8824 || TREE_CODE (exp) == CONSTRUCTOR
8825 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8826 return decl_readonly_section (exp, 0);
8827 else
8828 return false;
8831 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8832 to the call, and TYPE is its return type.
8834 Return NULL_TREE if no simplification was possible, otherwise return the
8835 simplified form of the call as a tree.
8837 The simplified form may be a constant or other expression which
8838 computes the same value, but in a more efficient manner (including
8839 calls to other builtin functions).
8841 The call may contain arguments which need to be evaluated, but
8842 which are not useful to determine the result of the call. In
8843 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8844 COMPOUND_EXPR will be an argument which must be evaluated.
8845 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8846 COMPOUND_EXPR in the chain will contain the tree for the simplified
8847 form of the builtin function call. */
8849 static tree
8850 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8852 if (!validate_arg (s1, POINTER_TYPE)
8853 || !validate_arg (s2, POINTER_TYPE))
8854 return NULL_TREE;
8855 else
8857 tree fn;
8858 const char *p1, *p2;
8860 p2 = c_getstr (s2);
8861 if (p2 == NULL)
8862 return NULL_TREE;
8864 p1 = c_getstr (s1);
8865 if (p1 != NULL)
8867 const char *r = strstr (p1, p2);
8868 tree tem;
8870 if (r == NULL)
8871 return build_int_cst (TREE_TYPE (s1), 0);
8873 /* Return an offset into the constant string argument. */
8874 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8875 return fold_convert_loc (loc, type, tem);
8878 /* The argument is const char *, and the result is char *, so we need
8879 a type conversion here to avoid a warning. */
8880 if (p2[0] == '\0')
8881 return fold_convert_loc (loc, type, s1);
8883 if (p2[1] != '\0')
8884 return NULL_TREE;
8886 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8887 if (!fn)
8888 return NULL_TREE;
8890 /* New argument list transforming strstr(s1, s2) to
8891 strchr(s1, s2[0]). */
8892 return build_call_expr_loc (loc, fn, 2, s1,
8893 build_int_cst (integer_type_node, p2[0]));
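/* Worked example (editor's note, not part of the original source):

     strstr (s, "")   ->  (char *) s
     strstr (s, "a")  ->  strchr (s, 'a')

   and with both strings constant the result folds to an offset into
   the first, e.g. strstr ("hello", "ll") -> "hello" + 2.  */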
8897 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
8898 the call, and TYPE is its return type.
8900 Return NULL_TREE if no simplification was possible, otherwise return the
8901 simplified form of the call as a tree.
8903 The simplified form may be a constant or other expression which
8904 computes the same value, but in a more efficient manner (including
8905 calls to other builtin functions).
8907 The call may contain arguments which need to be evaluated, but
8908 which are not useful to determine the result of the call. In
8909 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8910 COMPOUND_EXPR will be an argument which must be evaluated.
8911 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8912 COMPOUND_EXPR in the chain will contain the tree for the simplified
8913 form of the builtin function call. */
8915 static tree
8916 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8918 if (!validate_arg (s1, POINTER_TYPE)
8919 || !validate_arg (s2, INTEGER_TYPE))
8920 return NULL_TREE;
8921 else
8923 const char *p1;
8925 if (TREE_CODE (s2) != INTEGER_CST)
8926 return NULL_TREE;
8928 p1 = c_getstr (s1);
8929 if (p1 != NULL)
8931 char c;
8932 const char *r;
8933 tree tem;
8935 if (target_char_cast (s2, &c))
8936 return NULL_TREE;
8938 r = strchr (p1, c);
8940 if (r == NULL)
8941 return build_int_cst (TREE_TYPE (s1), 0);
8943 /* Return an offset into the constant string argument. */
8944 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8945 return fold_convert_loc (loc, type, tem);
8947 return NULL_TREE;

/* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
   the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, INTEGER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1;

      if (TREE_CODE (s2) != INTEGER_CST)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  char c;
	  const char *r;
	  tree tem;

	  if (target_char_cast (s2, &c))
	    return NULL_TREE;

	  r = strrchr (p1, c);

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (! integer_zerop (s2))
	return NULL_TREE;

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
      return build_call_expr_loc (loc, fn, 2, s1, s2);
    }
}
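
/* For exposition (added by the editor; not part of the original sources):
   strrchr ("hello", 'l') -> "hello" + 3, and for a non-constant string
   strrchr (s, '\0') -> strchr (s, '\0'), since both locate the
   terminating nul.  */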

/* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
   to the call, and TYPE is its return type.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      tree fn;
      const char *p1, *p2;

      p2 = c_getstr (s2);
      if (p2 == NULL)
	return NULL_TREE;

      p1 = c_getstr (s1);
      if (p1 != NULL)
	{
	  const char *r = strpbrk (p1, p2);
	  tree tem;

	  if (r == NULL)
	    return build_int_cst (TREE_TYPE (s1), 0);

	  /* Return an offset into the constant string argument.  */
	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
	  return fold_convert_loc (loc, type, tem);
	}

      if (p2[0] == '\0')
	/* strpbrk(x, "") == NULL.
	   Evaluate and ignore s1 in case it had side-effects.  */
	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);

      if (p2[1] != '\0')
	return NULL_TREE;  /* Really call strpbrk.  */

      fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (!fn)
	return NULL_TREE;

      /* New argument list transforming strpbrk(s1, s2) to
	 strchr(s1, s2[0]).  */
      return build_call_expr_loc (loc, fn, 2, s1,
				  build_int_cst (integer_type_node, p2[0]));
    }
}
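
/* For exposition (added by the editor; not part of the original sources):

     strpbrk ("hello", "xl") -> "hello" + 2
     strpbrk (s, "")         -> (char *) 0   (s still evaluated)
     strpbrk (s, "a")        -> strchr (s, 'a')  */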

/* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);

      /* If either argument is "", the result is zero.  */
      if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
	/* Evaluate and ignore both arguments in case either one has
	   side-effects.  */
	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
				      s1, s2);
      return NULL_TREE;
    }
}
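
/* For exposition (added by the editor; not part of the original sources):
   strspn ("", s) and strspn (s, "") both fold to (size_t) 0, with both
   operands kept for their side effects; all other cases are left to the
   library call or to later passes.  */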

/* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static tree
fold_builtin_strcspn (location_t loc, tree s1, tree s2)
{
  if (!validate_arg (s1, POINTER_TYPE)
      || !validate_arg (s2, POINTER_TYPE))
    return NULL_TREE;
  else
    {
      /* If the first argument is "", the result is zero.  */
      const char *p1 = c_getstr (s1);
      if (p1 && *p1 == '\0')
	{
	  /* Evaluate and ignore argument s2 in case it has
	     side-effects.  */
	  return omit_one_operand_loc (loc, size_type_node,
				       size_zero_node, s2);
	}

      /* If the second argument is "", return __builtin_strlen(s1).  */
      const char *p2 = c_getstr (s2);
      if (p2 && *p2 == '\0')
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);

	  /* If the replacement _DECL isn't initialized, don't do the
	     transformation.  */
	  if (!fn)
	    return NULL_TREE;

	  return build_call_expr_loc (loc, fn, 1, s1);
	}
      return NULL_TREE;
    }
}
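
/* For exposition (added by the editor; not part of the original sources):
   strcspn ("", s) -> (size_t) 0 (s still evaluated), and
   strcspn (s, "") -> strlen (s), since no character can match an empty
   reject set.  */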

/* Fold the next_arg or va_start call EXP.  Returns true if there was an error
   produced.  False otherwise.  This is done so that we don't output the error
   or warning twice or three times.  */

bool
fold_builtin_next_arg (tree exp, bool va_start_p)
{
  tree fntype = TREE_TYPE (current_function_decl);
  int nargs = call_expr_nargs (exp);
  tree arg;
  /* There is a good chance the current input_location points inside the
     definition of the va_start macro (perhaps on the token for
     builtin) in a system header, so warnings will not be emitted.
     Use the location in real source code.  */
  source_location current_location =
    linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
					      NULL);

  if (!stdarg_p (fntype))
    {
      error ("%<va_start%> used in function with fixed args");
      return true;
    }

  if (va_start_p)
    {
      if (va_start_p && (nargs != 2))
	{
	  error ("wrong number of arguments to function %<va_start%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 1);
    }
  /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
     when we checked the arguments and if needed issued a warning.  */
  else
    {
      if (nargs == 0)
	{
	  /* Evidently an out of date version of <stdarg.h>; can't validate
	     va_start's second argument, but can still work as intended.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "%<__builtin_next_arg%> called without an argument");
	  return true;
	}
      else if (nargs > 1)
	{
	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
	  return true;
	}
      arg = CALL_EXPR_ARG (exp, 0);
    }

  if (TREE_CODE (arg) == SSA_NAME)
    arg = SSA_NAME_VAR (arg);

  /* We destructively modify the call to be __builtin_va_start (ap, 0)
     or __builtin_next_arg (0) the first time we see it, after checking
     the arguments and if needed issuing a warning.  */
  if (!integer_zerop (arg))
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_EXPR for C++ reference
	 parameters.  */
      while (CONVERT_EXPR_P (arg)
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	{
	  /* FIXME: Sometimes the tree optimizers hand us something other
	     than the last argument even though the user used the last
	     argument.  We just warn here; the argument is still replaced
	     with zero below, so we may get wrong code because of it.  */
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "second parameter of %<va_start%> not last named argument");
	}

      /* Undefined by C99 7.15.1.4p4 (va_start):
	 "If the parameter parmN is declared with the register storage
	 class, with a function or array type, or with a type that is
	 not compatible with the type that results after application of
	 the default argument promotions, the behavior is undefined."
      */
      else if (DECL_REGISTER (arg))
	{
	  warning_at (current_location,
		      OPT_Wvarargs,
		      "undefined behavior when second parameter of "
		      "%<va_start%> is declared with %<register%> storage");
	}

      /* We want to verify the second parameter just once before the tree
	 optimizers are run and then avoid keeping it in the tree,
	 as otherwise we could warn even for correct code like:
	 void foo (int i, ...)
	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
      if (va_start_p)
	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
      else
	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
    }
  return false;
}
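
/* For exposition (added by the editor; hypothetical code):

     void f (int a, int b, ...) { va_list ap; va_start (ap, a); }

   triggers the warning that the second parameter of va_start is not the
   last named argument, while

     void g (int a) { va_list ap; va_start (ap, a); }

   is rejected with "va_start used in function with fixed args".  */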

/* Expand a call EXP to __builtin_object_size.  */

static rtx
expand_builtin_object_size (tree exp)
{
  tree ost;
  int object_size_type;
  tree fndecl = get_callee_fndecl (exp);

  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      error ("%Kfirst argument of %D must be a pointer, second integer constant",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  ost = CALL_EXPR_ARG (exp, 1);
  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    {
      error ("%Klast argument of %D is not integer constant between 0 and 3",
	     exp, fndecl);
      expand_builtin_trap ();
      return const0_rtx;
    }

  object_size_type = tree_to_shwi (ost);

  return object_size_type < 2 ? constm1_rtx : const0_rtx;
}
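
/* For reference (added by the editor): when the size cannot be
   determined, __builtin_object_size yields the "don't know" value for
   its class: (size_t) -1 for types 0 and 1 (maximum estimates) and
   (size_t) 0 for types 2 and 3 (minimum estimates).  E.g. with
   char buf[16], __builtin_object_size (buf, 0) is 16, while for an
   arbitrary pointer p, __builtin_object_size (p, 0) is (size_t) -1 and
   __builtin_object_size (p, 2) is (size_t) 0.  */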

/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
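
/* Illustrative expansion (added by the editor; hypothetical code):

     char buf[16];
     __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

   Here the length 8 is known to fit in the 16-byte destination, so the
   call is expanded as a plain memcpy (buf, src, 8).  With length 32 the
   "will always overflow" warning above fires instead and the checking
   call is left to trap at run time.  */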

/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  int is_strlen = 0;
  tree len, size;
  location_t loc = tree_nonartificial_location (exp);

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
    /* For __strcat_chk the warning will be emitted only if overflowing
       by at least strlen (dest) + 1 bytes.  */
    case BUILT_IN_STRCAT_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      is_strlen = 1;
      break;
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      len = CALL_EXPR_ARG (exp, 2);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      len = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  if (!len || !size)
    return;

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  if (is_strlen)
    {
      len = c_strlen (len, 1);
      if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
    }
  else if (fcode == BUILT_IN_STRNCAT_CHK)
    {
      tree src = CALL_EXPR_ARG (exp, 1);
      if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
	return;
      src = c_strlen (src, 1);
      if (! src || ! tree_fits_uhwi_p (src))
	{
	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return;
	}
      else if (tree_int_cst_lt (src, size))
	return;
    }
  else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
    return;

  warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
	      exp, get_callee_fndecl (exp));
}
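
/* Example of a call diagnosed above (added by the editor; hypothetical):

     char buf[4];
     __builtin___strcpy_chk (buf, "overflow",
			     __builtin_object_size (buf, 0));

   strlen ("overflow") + 1 exceeds the 4-byte destination, so the
   "will always overflow" warning is emitted at compile time.  */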

/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know its size too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
		0, "%Kcall to %D will always overflow destination buffer",
		exp, get_callee_fndecl (exp));
}
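
/* Example of a call diagnosed above (added by the editor; hypothetical):

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
			      "%s", "overflow");

   The "%s" argument is a literal of length 8 >= 4, so the warning is
   emitted at compile time.  */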

/* Emit warning if a free is called with address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}

/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determining
	 it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function decl, ARGS the array of its arguments and NARGS their
   number.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
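
/* For exposition (added by the editor; not part of the original sources):
   the only case handled here is __builtin_fpclassify, whose six leading
   arguments select the value returned for each class, e.g.

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)

   folds to FP_NORMAL when the last argument is a constant.  */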

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, the checks are bypassed.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
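
/* Worked example (added by the editor): remquo (5.0, 3.0, &q) rounds
   5.0/3.0 to the nearest integer, 2, so the folded result stores 2 in
   *q and yields the remainder 5.0 - 2*3.0 = -1.0.  */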

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
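
/* Worked example (added by the editor): lgamma_r (10.0, &sg) folds to
   log (9!) = log (362880), with *sg set to 1 since gamma (10) > 0; for
   an argument such as -0.5, where gamma is negative, *sg is set
   to -1.  */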

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
			 "ffs");
    }
}

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;