[official-gcc.git] / gcc / builtins.c
1 /* Expand builtin functions.
2 Copyright (C) 1988-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "asan.h"
64 #include "cilk.h"
65 #include "tree-chkp.h"
66 #include "rtl-chkp.h"
67 #include "internal-fn.h"
68 #include "case-cfn-macros.h"
69 #include "gimple-fold.h"
72 struct target_builtins default_target_builtins;
73 #if SWITCHABLE_TARGET
74 struct target_builtins *this_target_builtins = &default_target_builtins;
75 #endif
77 /* Define the names of the builtin function types and codes. */
78 const char *const built_in_class_names[BUILT_IN_LAST]
79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
81 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
82 const char * built_in_names[(int) END_BUILTINS] =
84 #include "builtins.def"
  87 /* Set up an array of builtin_info_type; make sure each element's decl is
  88    initialized to NULL_TREE.  */
89 builtin_info_type builtin_info[(int)END_BUILTINS];
91 /* Non-zero if __builtin_constant_p should be folded right away. */
92 bool force_folding_builtin_constant_p;
94 static rtx c_readstr (const char *, machine_mode);
95 static int target_char_cast (tree, char *);
96 static rtx get_memory_rtx (tree, tree);
97 static int apply_args_size (void);
98 static int apply_result_size (void);
99 static rtx result_vector (int, rtx);
100 static void expand_builtin_prefetch (tree);
101 static rtx expand_builtin_apply_args (void);
102 static rtx expand_builtin_apply_args_1 (void);
103 static rtx expand_builtin_apply (rtx, rtx, rtx);
104 static void expand_builtin_return (rtx);
105 static enum type_class type_to_class (tree);
106 static rtx expand_builtin_classify_type (tree);
107 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
108 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
109 static rtx expand_builtin_interclass_mathfn (tree, rtx);
110 static rtx expand_builtin_sincos (tree);
111 static rtx expand_builtin_cexpi (tree, rtx);
112 static rtx expand_builtin_int_roundingfn (tree, rtx);
113 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
114 static rtx expand_builtin_next_arg (void);
115 static rtx expand_builtin_va_start (tree);
116 static rtx expand_builtin_va_end (tree);
117 static rtx expand_builtin_va_copy (tree);
118 static rtx expand_builtin_strcmp (tree, rtx);
119 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
121 static rtx expand_builtin_memcpy (tree, rtx);
122 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
124 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
125 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
126 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
127 machine_mode, int, tree);
128 static rtx expand_builtin_strcpy (tree, rtx);
129 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
130 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
131 static rtx expand_builtin_strncpy (tree, rtx);
132 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
133 static rtx expand_builtin_memset (tree, rtx, machine_mode);
134 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
135 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
136 static rtx expand_builtin_bzero (tree);
137 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
138 static rtx expand_builtin_alloca (tree, bool);
139 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
140 static rtx expand_builtin_frame_address (tree, tree);
141 static tree stabilize_va_list_loc (location_t, tree, int);
142 static rtx expand_builtin_expect (tree, rtx);
143 static tree fold_builtin_constant_p (tree);
144 static tree fold_builtin_classify_type (tree);
145 static tree fold_builtin_strlen (location_t, tree, tree);
146 static tree fold_builtin_inf (location_t, tree, int);
147 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
148 static bool validate_arg (const_tree, enum tree_code code);
149 static rtx expand_builtin_fabs (tree, rtx, rtx);
150 static rtx expand_builtin_signbit (tree, rtx);
151 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
152 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
153 static tree fold_builtin_strcmp (location_t, tree, tree);
154 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
155 static tree fold_builtin_isascii (location_t, tree);
156 static tree fold_builtin_toascii (location_t, tree);
157 static tree fold_builtin_isdigit (location_t, tree);
158 static tree fold_builtin_fabs (location_t, tree, tree);
159 static tree fold_builtin_abs (location_t, tree, tree);
160 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
161 enum tree_code);
162 static tree fold_builtin_0 (location_t, tree);
163 static tree fold_builtin_1 (location_t, tree, tree);
164 static tree fold_builtin_2 (location_t, tree, tree, tree);
165 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
166 static tree fold_builtin_varargs (location_t, tree, tree*, int);
168 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
169 static tree fold_builtin_strstr (location_t, tree, tree, tree);
170 static tree fold_builtin_strspn (location_t, tree, tree);
171 static tree fold_builtin_strcspn (location_t, tree, tree);
173 static rtx expand_builtin_object_size (tree);
174 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
175 enum built_in_function);
176 static void maybe_emit_chk_warning (tree, enum built_in_function);
177 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
178 static void maybe_emit_free_warning (tree);
179 static tree fold_builtin_object_size (tree, tree);
181 unsigned HOST_WIDE_INT target_newline;
182 unsigned HOST_WIDE_INT target_percent;
183 static unsigned HOST_WIDE_INT target_c;
184 static unsigned HOST_WIDE_INT target_s;
185 char target_percent_c[3];
186 char target_percent_s[3];
187 char target_percent_s_newline[4];
188 static tree do_mpfr_remquo (tree, tree, tree);
189 static tree do_mpfr_lgamma_r (tree, tree, tree);
190 static void expand_builtin_sync_synchronize (void);
 192 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_ (or is a
     Cilk Plus runtime entry point when flag_cilkplus is set).  */
194 static bool
195 is_builtin_name (const char *name)
197 if (strncmp (name, "__builtin_", 10) == 0)
198 return true;
199 if (strncmp (name, "__sync_", 7) == 0)
200 return true;
201 if (strncmp (name, "__atomic_", 9) == 0)
202 return true;
203 if (flag_cilkplus
204 && (!strcmp (name, "__cilkrts_detach")
205 || !strcmp (name, "__cilkrts_pop_frame")))
206 return true;
207 return false;
211 /* Return true if DECL is a function symbol representing a built-in. */
213 bool
214 is_builtin_fn (tree decl)
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
219 /* Return true if NODE should be considered for inline expansion regardless
 220    of the optimization level.  This is the case whenever a function is invoked
 221    with its "internal" name, which normally contains the prefix "__builtin".  */
223 bool
224 called_as_built_in (tree node)
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
 233 /* Compute values M and N such that M divides (address of EXP - N) and such
 234    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
 235    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
 236    *ALIGNP and any bit-offset to *BITPOSP.
238 Note that the address (and thus the alignment) computed here is based
239 on the address to which a symbol resolves, whereas DECL_ALIGN is based
240 on the address at which an object is actually located. These two
241 addresses are not always the same. For example, on ARM targets,
242 the address &foo of a Thumb function foo() has the lowest bit set,
243 whereas foo() itself starts on an even address.
245 If ADDR_P is true we are taking the address of the memory reference EXP
246 and thus cannot rely on the access taking place. */
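/* Illustrative example (not a special case handled below): if the address of
   EXP is known to equal 16 * k + 4 bytes for some unknown k, then M is
   16 * BITS_PER_UNIT and N is 4 * BITS_PER_UNIT.  */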
248 static bool
249 get_object_alignment_2 (tree exp, unsigned int *alignp,
250 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
252 HOST_WIDE_INT bitsize, bitpos;
253 tree offset;
254 machine_mode mode;
255 int unsignedp, reversep, volatilep;
256 unsigned int align = BITS_PER_UNIT;
257 bool known_alignment = false;
259 /* Get the innermost object and the constant (bitpos) and possibly
260 variable (offset) offset of the access. */
261 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
262 &unsignedp, &reversep, &volatilep);
264 /* Extract alignment information from the innermost object and
265 possibly adjust bitpos and offset. */
266 if (TREE_CODE (exp) == FUNCTION_DECL)
268 /* Function addresses can encode extra information besides their
269 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
270 allows the low bit to be used as a virtual bit, we know
271 that the address itself must be at least 2-byte aligned. */
272 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
273 align = 2 * BITS_PER_UNIT;
275 else if (TREE_CODE (exp) == LABEL_DECL)
277 else if (TREE_CODE (exp) == CONST_DECL)
279 /* The alignment of a CONST_DECL is determined by its initializer. */
280 exp = DECL_INITIAL (exp);
281 align = TYPE_ALIGN (TREE_TYPE (exp));
282 if (CONSTANT_CLASS_P (exp))
283 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
285 known_alignment = true;
287 else if (DECL_P (exp))
289 align = DECL_ALIGN (exp);
290 known_alignment = true;
292 else if (TREE_CODE (exp) == INDIRECT_REF
293 || TREE_CODE (exp) == MEM_REF
294 || TREE_CODE (exp) == TARGET_MEM_REF)
296 tree addr = TREE_OPERAND (exp, 0);
297 unsigned ptr_align;
298 unsigned HOST_WIDE_INT ptr_bitpos;
299 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
 301       /* If the address is explicitly aligned, handle that.  */
302 if (TREE_CODE (addr) == BIT_AND_EXPR
303 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
305 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
306 ptr_bitmask *= BITS_PER_UNIT;
307 align = least_bit_hwi (ptr_bitmask);
308 addr = TREE_OPERAND (addr, 0);
311 known_alignment
312 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
313 align = MAX (ptr_align, align);
315 /* Re-apply explicit alignment to the bitpos. */
316 ptr_bitpos &= ptr_bitmask;
318 /* The alignment of the pointer operand in a TARGET_MEM_REF
319 has to take the variable offset parts into account. */
320 if (TREE_CODE (exp) == TARGET_MEM_REF)
322 if (TMR_INDEX (exp))
324 unsigned HOST_WIDE_INT step = 1;
325 if (TMR_STEP (exp))
326 step = TREE_INT_CST_LOW (TMR_STEP (exp));
327 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
329 if (TMR_INDEX2 (exp))
330 align = BITS_PER_UNIT;
331 known_alignment = false;
334 /* When EXP is an actual memory reference then we can use
335 TYPE_ALIGN of a pointer indirection to derive alignment.
336 Do so only if get_pointer_alignment_1 did not reveal absolute
337 alignment knowledge and if using that alignment would
338 improve the situation. */
339 if (!addr_p && !known_alignment
340 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
341 align = TYPE_ALIGN (TREE_TYPE (exp));
342 else
344 /* Else adjust bitpos accordingly. */
345 bitpos += ptr_bitpos;
346 if (TREE_CODE (exp) == MEM_REF
347 || TREE_CODE (exp) == TARGET_MEM_REF)
348 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
351 else if (TREE_CODE (exp) == STRING_CST)
 353       /* STRING_CSTs are the only constant objects we allow not to be
 354 	 wrapped inside a CONST_DECL.  */
355 align = TYPE_ALIGN (TREE_TYPE (exp));
356 if (CONSTANT_CLASS_P (exp))
357 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
359 known_alignment = true;
362 /* If there is a non-constant offset part extract the maximum
363 alignment that can prevail. */
364 if (offset)
366 unsigned int trailing_zeros = tree_ctz (offset);
367 if (trailing_zeros < HOST_BITS_PER_INT)
369 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
370 if (inner)
371 align = MIN (align, inner);
375 *alignp = align;
376 *bitposp = bitpos & (*alignp - 1);
377 return known_alignment;
380 /* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 382    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
 383    and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
385 bool
386 get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
392 /* Return the alignment in bits of EXP, an object. */
394 unsigned int
395 get_object_alignment (tree exp)
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
400 get_object_alignment_1 (exp, &align, &bitpos);
402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
405 if (bitpos != 0)
406 align = least_bit_hwi (bitpos);
407 return align;
410 /* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
 412    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
413 the results are just a conservative approximation.
415 If EXP is not a pointer, false is returned too. */
417 bool
418 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
421 STRIP_NOPS (exp);
423 if (TREE_CODE (exp) == ADDR_EXPR)
424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 unsigned int ptr_align, ptr_misalign;
452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
462 /* We cannot really tell whether this result is an approximation. */
463 return false;
465 else
467 *bitposp = 0;
468 *alignp = BITS_PER_UNIT;
469 return false;
472 else if (TREE_CODE (exp) == INTEGER_CST)
474 *alignp = BIGGEST_ALIGNMENT;
475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
480 *bitposp = 0;
481 *alignp = BITS_PER_UNIT;
482 return false;
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
487 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
492 unsigned int
493 get_pointer_alignment (tree exp)
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
498 get_pointer_alignment_1 (exp, &align, &bitpos);
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
503 if (bitpos != 0)
504 align = least_bit_hwi (bitpos);
506 return align;
 509 /* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
 510    way, because the string could contain a zero byte in the middle.
511 TREE_STRING_LENGTH is the size of the character array, not the string.
513 ONLY_VALUE should be nonzero if the result is not going to be emitted
514 into the instruction stream and zero if it is going to be expanded.
515 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
516 is returned, otherwise NULL, since
517 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
518 evaluate the side-effects.
520 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
521 accesses. Note that this implies the result is not going to be emitted
522 into the instruction stream.
524 The value returned is of type `ssizetype'.
526 Unfortunately, string_constant can't access the values of const char
527 arrays with initializers, so neither can we do so here. */
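/* Illustrative example: for the literal "hello", c_strlen returns 5, whereas
   TREE_STRING_LENGTH of the underlying STRING_CST is 6 because it counts the
   trailing NUL.  */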
529 tree
530 c_strlen (tree src, int only_value)
532 tree offset_node;
533 HOST_WIDE_INT offset;
534 int max;
535 const char *ptr;
536 location_t loc;
538 STRIP_NOPS (src);
539 if (TREE_CODE (src) == COND_EXPR
540 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
542 tree len1, len2;
544 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
545 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
546 if (tree_int_cst_equal (len1, len2))
547 return len1;
550 if (TREE_CODE (src) == COMPOUND_EXPR
551 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
552 return c_strlen (TREE_OPERAND (src, 1), only_value);
554 loc = EXPR_LOC_OR_LOC (src, input_location);
556 src = string_constant (src, &offset_node);
557 if (src == 0)
558 return NULL_TREE;
560 max = TREE_STRING_LENGTH (src) - 1;
561 ptr = TREE_STRING_POINTER (src);
563 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
565 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
566 compute the offset to the following null if we don't know where to
567 start searching for it. */
568 int i;
570 for (i = 0; i < max; i++)
571 if (ptr[i] == 0)
572 return NULL_TREE;
574 /* We don't know the starting offset, but we do know that the string
575 has no internal zero bytes. We can assume that the offset falls
576 within the bounds of the string; otherwise, the programmer deserves
577 what he gets. Subtract the offset from the length of the string,
578 and return that. This would perhaps not be valid if we were dealing
579 with named arrays in addition to literal string constants. */
581 return size_diffop_loc (loc, size_int (max), offset_node);
584 /* We have a known offset into the string. Start searching there for
585 a null character if we can represent it as a single HOST_WIDE_INT. */
586 if (offset_node == 0)
587 offset = 0;
588 else if (! tree_fits_shwi_p (offset_node))
589 offset = -1;
590 else
591 offset = tree_to_shwi (offset_node);
593 /* If the offset is known to be out of bounds, warn, and call strlen at
594 runtime. */
595 if (offset < 0 || offset > max)
597 /* Suppress multiple warnings for propagated constant strings. */
598 if (only_value != 2
599 && !TREE_NO_WARNING (src))
601 warning_at (loc, 0, "offset outside bounds of constant string");
602 TREE_NO_WARNING (src) = 1;
604 return NULL_TREE;
607 /* Use strlen to search for the first zero byte. Since any strings
608 constructed with build_string will have nulls appended, we win even
609 if we get handed something like (char[4])"abcd".
611 Since OFFSET is our starting index into the string, no further
612 calculation is needed. */
613 return ssize_int (strlen (ptr + offset));
616 /* Return a constant integer corresponding to target reading
617 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
619 static rtx
620 c_readstr (const char *str, machine_mode mode)
622 HOST_WIDE_INT ch;
623 unsigned int i, j;
624 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
626 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
627 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
628 / HOST_BITS_PER_WIDE_INT;
630 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
631 for (i = 0; i < len; i++)
632 tmp[i] = 0;
634 ch = 1;
635 for (i = 0; i < GET_MODE_SIZE (mode); i++)
637 j = i;
638 if (WORDS_BIG_ENDIAN)
639 j = GET_MODE_SIZE (mode) - i - 1;
640 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
641 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
642 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
643 j *= BITS_PER_UNIT;
645 if (ch)
646 ch = (unsigned char) str[i];
647 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
650 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
651 return immed_wide_int_const (c, mode);
 654 /* Cast a target constant CST to target CHAR; if that value fits into the
 655    host char type, return zero and store the value in the variable pointed to
 656    by P.  Otherwise return nonzero.  */
658 static int
659 target_char_cast (tree cst, char *p)
661 unsigned HOST_WIDE_INT val, hostval;
663 if (TREE_CODE (cst) != INTEGER_CST
664 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
665 return 1;
667 /* Do not care if it fits or not right here. */
668 val = TREE_INT_CST_LOW (cst);
670 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
671 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
673 hostval = val;
674 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
675 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
677 if (val != hostval)
678 return 1;
680 *p = hostval;
681 return 0;
684 /* Similar to save_expr, but assumes that arbitrary code is not executed
685 in between the multiple evaluations. In particular, we assume that a
686 non-addressable local variable will not be modified. */
688 static tree
689 builtin_save_expr (tree exp)
691 if (TREE_CODE (exp) == SSA_NAME
692 || (TREE_ADDRESSABLE (exp) == 0
693 && (TREE_CODE (exp) == PARM_DECL
694 || (VAR_P (exp) && !TREE_STATIC (exp)))))
695 return exp;
697 return save_expr (exp);
700 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
701 times to get the address of either a higher stack frame, or a return
702 address located within it (depending on FNDECL_CODE). */
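/* Illustrative example: __builtin_return_address (0) reads the return address
   without walking any frames, while __builtin_frame_address (2) follows the
   dynamic chain twice.  */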
704 static rtx
705 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
707 int i;
708 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
709 if (tem == NULL_RTX)
711 /* For a zero count with __builtin_return_address, we don't care what
712 frame address we return, because target-specific definitions will
713 override us. Therefore frame pointer elimination is OK, and using
714 the soft frame pointer is OK.
716 For a nonzero count, or a zero count with __builtin_frame_address,
717 we require a stable offset from the current frame pointer to the
718 previous one, so we must use the hard frame pointer, and
719 we must disable frame pointer elimination. */
720 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
721 tem = frame_pointer_rtx;
722 else
724 tem = hard_frame_pointer_rtx;
726 /* Tell reload not to eliminate the frame pointer. */
727 crtl->accesses_prior_frames = 1;
731 if (count > 0)
732 SETUP_FRAME_ADDRESSES ();
734 /* On the SPARC, the return address is not in the frame, it is in a
735 register. There is no way to access it off of the current frame
736 pointer, but it can be accessed off the previous frame pointer by
737 reading the value from the register window save area. */
738 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
739 count--;
741 /* Scan back COUNT frames to the specified frame. */
742 for (i = 0; i < count; i++)
744 /* Assume the dynamic chain pointer is in the word that the
745 frame address points to, unless otherwise specified. */
746 tem = DYNAMIC_CHAIN_ADDRESS (tem);
747 tem = memory_address (Pmode, tem);
748 tem = gen_frame_mem (Pmode, tem);
749 tem = copy_to_reg (tem);
752 /* For __builtin_frame_address, return what we've got. But, on
753 the SPARC for example, we may have to add a bias. */
754 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
755 return FRAME_ADDR_RTX (tem);
757 /* For __builtin_return_address, get the return address from that frame. */
758 #ifdef RETURN_ADDR_RTX
759 tem = RETURN_ADDR_RTX (count, tem);
760 #else
761 tem = memory_address (Pmode,
762 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
763 tem = gen_frame_mem (Pmode, tem);
764 #endif
765 return tem;
768 /* Alias set used for setjmp buffer. */
769 static alias_set_type setjmp_alias_set = -1;
771 /* Construct the leading half of a __builtin_setjmp call. Control will
772 return to RECEIVER_LABEL. This is also called directly by the SJLJ
773 exception handling code. */
775 void
776 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
778 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
779 rtx stack_save;
780 rtx mem;
782 if (setjmp_alias_set == -1)
783 setjmp_alias_set = new_alias_set ();
785 buf_addr = convert_memory_address (Pmode, buf_addr);
787 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
789 /* We store the frame pointer and the address of receiver_label in
790 the buffer and use the rest of it for the stack save area, which
791 is machine-dependent. */
793 mem = gen_rtx_MEM (Pmode, buf_addr);
794 set_mem_alias_set (mem, setjmp_alias_set);
795 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
797 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
798 GET_MODE_SIZE (Pmode))),
799 set_mem_alias_set (mem, setjmp_alias_set);
801 emit_move_insn (validize_mem (mem),
802 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
804 stack_save = gen_rtx_MEM (sa_mode,
805 plus_constant (Pmode, buf_addr,
806 2 * GET_MODE_SIZE (Pmode)));
807 set_mem_alias_set (stack_save, setjmp_alias_set);
808 emit_stack_save (SAVE_NONLOCAL, &stack_save);
810 /* If there is further processing to do, do it. */
811 if (targetm.have_builtin_setjmp_setup ())
812 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
814 /* We have a nonlocal label. */
815 cfun->has_nonlocal_label = 1;
818 /* Construct the trailing part of a __builtin_setjmp call. This is
819 also called directly by the SJLJ exception handling code.
 820    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
822 void
823 expand_builtin_setjmp_receiver (rtx receiver_label)
825 rtx chain;
827 /* Mark the FP as used when we get here, so we have to make sure it's
828 marked as used by this function. */
829 emit_use (hard_frame_pointer_rtx);
831 /* Mark the static chain as clobbered here so life information
832 doesn't get messed up for it. */
833 chain = targetm.calls.static_chain (current_function_decl, true);
834 if (chain && REG_P (chain))
835 emit_clobber (chain);
837 /* Now put in the code to restore the frame pointer, and argument
838 pointer, if needed. */
839 if (! targetm.have_nonlocal_goto ())
841 /* First adjust our frame pointer to its actual value. It was
842 previously set to the start of the virtual area corresponding to
843 the stacked variables when we branched here and now needs to be
844 adjusted to the actual hardware fp value.
846 Assignments to virtual registers are converted by
847 instantiate_virtual_regs into the corresponding assignment
848 to the underlying register (fp in this case) that makes
849 the original assignment true.
850 So the following insn will actually be decrementing fp by
851 STARTING_FRAME_OFFSET. */
852 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
854 /* Restoring the frame pointer also modifies the hard frame pointer.
855 Mark it used (so that the previous assignment remains live once
856 the frame pointer is eliminated) and clobbered (to represent the
857 implicit update from the assignment). */
858 emit_use (hard_frame_pointer_rtx);
859 emit_clobber (hard_frame_pointer_rtx);
862 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
864 /* If the argument pointer can be eliminated in favor of the
865 frame pointer, we don't need to restore it. We assume here
866 that if such an elimination is present, it can always be used.
867 This is the case on all known machines; if we don't make this
868 assumption, we do unnecessary saving on many machines. */
869 size_t i;
870 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
872 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
873 if (elim_regs[i].from == ARG_POINTER_REGNUM
874 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
875 break;
877 if (i == ARRAY_SIZE (elim_regs))
879 /* Now restore our arg pointer from the address at which it
880 was saved in our stack frame. */
881 emit_move_insn (crtl->args.internal_arg_pointer,
882 copy_to_reg (get_arg_pointer_save_area ()));
886 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
887 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
888 else if (targetm.have_nonlocal_goto_receiver ())
889 emit_insn (targetm.gen_nonlocal_goto_receiver ());
890 else
891 { /* Nothing */ }
893 /* We must not allow the code we just generated to be reordered by
894 scheduling. Specifically, the update of the frame pointer must
895 happen immediately, not later. */
896 emit_insn (gen_blockage ());
899 /* __builtin_longjmp is passed a pointer to an array of five words (not
900 all will be used on all machines). It operates similarly to the C
901 library function of the same name, but is more efficient. Much of
902 the code below is copied from the handling of non-local gotos. */
904 static void
905 expand_builtin_longjmp (rtx buf_addr, rtx value)
907 rtx fp, lab, stack;
908 rtx_insn *insn, *last;
909 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
911 /* DRAP is needed for stack realign if longjmp is expanded to current
912 function */
913 if (SUPPORTS_STACK_ALIGNMENT)
914 crtl->need_drap = true;
916 if (setjmp_alias_set == -1)
917 setjmp_alias_set = new_alias_set ();
919 buf_addr = convert_memory_address (Pmode, buf_addr);
921 buf_addr = force_reg (Pmode, buf_addr);
 923   /* We require that the user pass a second argument of 1, because
924 that is what builtin_setjmp will return. */
925 gcc_assert (value == const1_rtx);
927 last = get_last_insn ();
928 if (targetm.have_builtin_longjmp ())
929 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
930 else
932 fp = gen_rtx_MEM (Pmode, buf_addr);
933 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
934 GET_MODE_SIZE (Pmode)));
936 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
937 2 * GET_MODE_SIZE (Pmode)));
938 set_mem_alias_set (fp, setjmp_alias_set);
939 set_mem_alias_set (lab, setjmp_alias_set);
940 set_mem_alias_set (stack, setjmp_alias_set);
942 /* Pick up FP, label, and SP from the block and jump. This code is
943 from expand_goto in stmt.c; see there for detailed comments. */
944 if (targetm.have_nonlocal_goto ())
945 /* We have to pass a value to the nonlocal_goto pattern that will
946 get copied into the static_chain pointer, but it does not matter
947 what that value is, because builtin_setjmp does not use it. */
948 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
949 else
951 lab = copy_to_reg (lab);
953 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
954 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
956 emit_move_insn (hard_frame_pointer_rtx, fp);
957 emit_stack_restore (SAVE_NONLOCAL, stack);
959 emit_use (hard_frame_pointer_rtx);
960 emit_use (stack_pointer_rtx);
961 emit_indirect_jump (lab);
965 /* Search backwards and mark the jump insn as a non-local goto.
966 Note that this precludes the use of __builtin_longjmp to a
967 __builtin_setjmp target in the same function. However, we've
968 already cautioned the user that these functions are for
969 internal exception handling use only. */
970 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
972 gcc_assert (insn != last);
974 if (JUMP_P (insn))
976 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
977 break;
979 else if (CALL_P (insn))
980 break;
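/* Return true if ITER has not yet visited every argument of the call.  */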
984 static inline bool
985 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
987 return (iter->i < iter->n);
990 /* This function validates the types of a function call argument list
991 against a specified list of tree_codes. If the last specifier is a 0,
 992    that represents an ellipsis, otherwise the last specifier must be a
993 VOID_TYPE. */
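/* For example, expand_builtin_prefetch below checks its call with
   validate_arglist (exp, POINTER_TYPE, 0): a pointer first argument followed
   by any number of further arguments.  */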
995 static bool
996 validate_arglist (const_tree callexpr, ...)
998 enum tree_code code;
999 bool res = 0;
1000 va_list ap;
1001 const_call_expr_arg_iterator iter;
1002 const_tree arg;
1004 va_start (ap, callexpr);
1005 init_const_call_expr_arg_iterator (callexpr, &iter);
1009 code = (enum tree_code) va_arg (ap, int);
1010 switch (code)
1012 case 0:
 1013 	case 0:
 1013 	  /* This signifies an ellipsis; any further arguments are all OK.  */
1014 res = true;
1015 goto end;
1016 case VOID_TYPE:
 1017 	  /* This signifies an endlink; if no arguments remain, return
 1018 	     true, otherwise return false.  */
1019 res = !more_const_call_expr_args_p (&iter);
1020 goto end;
1021 default:
1022 /* If no parameters remain or the parameter's code does not
1023 match the specified code, return false. Otherwise continue
1024 checking any remaining arguments. */
1025 arg = next_const_call_expr_arg (&iter);
1026 if (!validate_arg (arg, code))
1027 goto end;
1028 break;
1031 while (1);
1033 /* We need gotos here since we can only have one VA_CLOSE in a
1034 function. */
1035 end: ;
1036 va_end (ap);
1038 return res;
1041 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1042 and the address of the save area. */
1044 static rtx
1045 expand_builtin_nonlocal_goto (tree exp)
1047 tree t_label, t_save_area;
1048 rtx r_label, r_save_area, r_fp, r_sp;
1049 rtx_insn *insn;
1051 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1052 return NULL_RTX;
1054 t_label = CALL_EXPR_ARG (exp, 0);
1055 t_save_area = CALL_EXPR_ARG (exp, 1);
1057 r_label = expand_normal (t_label);
1058 r_label = convert_memory_address (Pmode, r_label);
1059 r_save_area = expand_normal (t_save_area);
1060 r_save_area = convert_memory_address (Pmode, r_save_area);
1061 /* Copy the address of the save location to a register just in case it was
1062 based on the frame pointer. */
1063 r_save_area = copy_to_reg (r_save_area);
1064 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1065 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1066 plus_constant (Pmode, r_save_area,
1067 GET_MODE_SIZE (Pmode)));
1069 crtl->has_nonlocal_goto = 1;
1071 /* ??? We no longer need to pass the static chain value, afaik. */
1072 if (targetm.have_nonlocal_goto ())
1073 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1074 else
1076 r_label = copy_to_reg (r_label);
1078 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1079 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1081 /* Restore frame pointer for containing function. */
1082 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1083 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1085 /* USE of hard_frame_pointer_rtx added for consistency;
1086 not clear if really needed. */
1087 emit_use (hard_frame_pointer_rtx);
1088 emit_use (stack_pointer_rtx);
1090 /* If the architecture is using a GP register, we must
1091 conservatively assume that the target function makes use of it.
1092 The prologue of functions with nonlocal gotos must therefore
1093 initialize the GP register to the appropriate value, and we
1094 must then make sure that this value is live at the point
1095 of the jump. (Note that this doesn't necessarily apply
1096 to targets with a nonlocal_goto pattern; they are free
1097 to implement it in their own way. Note also that this is
1098 a no-op if the GP register is a global invariant.) */
1099 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1100 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1101 emit_use (pic_offset_table_rtx);
1103 emit_indirect_jump (r_label);
1106 /* Search backwards to the jump insn and mark it as a
1107 non-local goto. */
1108 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1110 if (JUMP_P (insn))
1112 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1113 break;
1115 else if (CALL_P (insn))
1116 break;
1119 return const0_rtx;
1122 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1123 (not all will be used on all machines) that was passed to __builtin_setjmp.
1124 It updates the stack pointer in that block to the current value. This is
1125 also called directly by the SJLJ exception handling code. */
1127 void
1128 expand_builtin_update_setjmp_buf (rtx buf_addr)
1130 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1131 rtx stack_save
1132 = gen_rtx_MEM (sa_mode,
1133 memory_address
1134 (sa_mode,
1135 plus_constant (Pmode, buf_addr,
1136 2 * GET_MODE_SIZE (Pmode))));
1138 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1141 /* Expand a call to __builtin_prefetch. For a target that does not support
1142 data prefetch, evaluate the memory address argument in case it has side
1143 effects. */
1145 static void
1146 expand_builtin_prefetch (tree exp)
1148 tree arg0, arg1, arg2;
1149 int nargs;
1150 rtx op0, op1, op2;
1152 if (!validate_arglist (exp, POINTER_TYPE, 0))
1153 return;
1155 arg0 = CALL_EXPR_ARG (exp, 0);
1157 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1158 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1159 locality). */
1160 nargs = call_expr_nargs (exp);
1161 if (nargs > 1)
1162 arg1 = CALL_EXPR_ARG (exp, 1);
1163 else
1164 arg1 = integer_zero_node;
1165 if (nargs > 2)
1166 arg2 = CALL_EXPR_ARG (exp, 2);
1167 else
1168 arg2 = integer_three_node;
1170 /* Argument 0 is an address. */
1171 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1173 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1174 if (TREE_CODE (arg1) != INTEGER_CST)
1176 error ("second argument to %<__builtin_prefetch%> must be a constant");
1177 arg1 = integer_zero_node;
1179 op1 = expand_normal (arg1);
1180 /* Argument 1 must be either zero or one. */
1181 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1183 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1184 " using zero");
1185 op1 = const0_rtx;
1188 /* Argument 2 (locality) must be a compile-time constant int. */
1189 if (TREE_CODE (arg2) != INTEGER_CST)
1191 error ("third argument to %<__builtin_prefetch%> must be a constant");
1192 arg2 = integer_zero_node;
1194 op2 = expand_normal (arg2);
1195 /* Argument 2 must be 0, 1, 2, or 3. */
1196 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1198 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1199 op2 = const0_rtx;
1202 if (targetm.have_prefetch ())
1204 struct expand_operand ops[3];
1206 create_address_operand (&ops[0], op0);
1207 create_integer_operand (&ops[1], INTVAL (op1));
1208 create_integer_operand (&ops[2], INTVAL (op2));
1209 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1210 return;
1213 /* Don't do anything with direct references to volatile memory, but
1214 generate code to handle other side effects. */
1215 if (!MEM_P (op0) && side_effects_p (op0))
1216 emit_insn (op0);
1219 /* Get a MEM rtx for expression EXP which is the address of an operand
1220 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1221 the maximum length of the block of memory that might be accessed or
1222 NULL if unknown. */
1224 static rtx
1225 get_memory_rtx (tree exp, tree len)
1227 tree orig_exp = exp;
1228 rtx addr, mem;
 1230   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
 1231      from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1232 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1233 exp = TREE_OPERAND (exp, 0);
1235 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1236 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1238 /* Get an expression we can use to find the attributes to assign to MEM.
1239 First remove any nops. */
1240 while (CONVERT_EXPR_P (exp)
1241 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1242 exp = TREE_OPERAND (exp, 0);
 1244   /* Build a MEM_REF representing the whole accessed area as a byte blob
 1245      (as builtin stringops may alias with anything).  */
1246 exp = fold_build2 (MEM_REF,
1247 build_array_type (char_type_node,
1248 build_range_type (sizetype,
1249 size_one_node, len)),
1250 exp, build_int_cst (ptr_type_node, 0));
1252 /* If the MEM_REF has no acceptable address, try to get the base object
1253 from the original address we got, and build an all-aliasing
1254 unknown-sized access to that one. */
1255 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1256 set_mem_attributes (mem, exp, 0);
1257 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1258 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1259 0))))
1261 exp = build_fold_addr_expr (exp);
1262 exp = fold_build2 (MEM_REF,
1263 build_array_type (char_type_node,
1264 build_range_type (sizetype,
1265 size_zero_node,
1266 NULL)),
1267 exp, build_int_cst (ptr_type_node, 0));
1268 set_mem_attributes (mem, exp, 0);
1270 set_mem_alias_set (mem, 0);
1271 return mem;
1274 /* Built-in functions to perform an untyped call and return. */
1276 #define apply_args_mode \
1277 (this_target_builtins->x_apply_args_mode)
1278 #define apply_result_mode \
1279 (this_target_builtins->x_apply_result_mode)
1281 /* Return the size required for the block returned by __builtin_apply_args,
1282 and initialize apply_args_mode. */
1284 static int
1285 apply_args_size (void)
1287 static int size = -1;
1288 int align;
1289 unsigned int regno;
1290 machine_mode mode;
1292 /* The values computed by this function never change. */
1293 if (size < 0)
1295 /* The first value is the incoming arg-pointer. */
1296 size = GET_MODE_SIZE (Pmode);
1298 /* The second value is the structure value address unless this is
1299 passed as an "invisible" first argument. */
1300 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1301 size += GET_MODE_SIZE (Pmode);
1303 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1304 if (FUNCTION_ARG_REGNO_P (regno))
1306 mode = targetm.calls.get_raw_arg_mode (regno);
1308 gcc_assert (mode != VOIDmode);
1310 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1311 if (size % align != 0)
1312 size = CEIL (size, align) * align;
1313 size += GET_MODE_SIZE (mode);
1314 apply_args_mode[regno] = mode;
1316 else
1318 apply_args_mode[regno] = VOIDmode;
1321 return size;
1324 /* Return the size required for the block returned by __builtin_apply,
1325 and initialize apply_result_mode. */
1327 static int
1328 apply_result_size (void)
1330 static int size = -1;
1331 int align, regno;
1332 machine_mode mode;
1334 /* The values computed by this function never change. */
1335 if (size < 0)
1337 size = 0;
1339 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1340 if (targetm.calls.function_value_regno_p (regno))
1342 mode = targetm.calls.get_raw_result_mode (regno);
1344 gcc_assert (mode != VOIDmode);
1346 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1347 if (size % align != 0)
1348 size = CEIL (size, align) * align;
1349 size += GET_MODE_SIZE (mode);
1350 apply_result_mode[regno] = mode;
1352 else
1353 apply_result_mode[regno] = VOIDmode;
1355 /* Allow targets that use untyped_call and untyped_return to override
1356 the size so that machine-specific information can be stored here. */
1357 #ifdef APPLY_RESULT_SIZE
1358 size = APPLY_RESULT_SIZE;
1359 #endif
1361 return size;
1364 /* Create a vector describing the result block RESULT. If SAVEP is true,
1365 the result block is used to save the values; otherwise it is used to
1366 restore the values. */
1368 static rtx
1369 result_vector (int savep, rtx result)
1371 int regno, size, align, nelts;
1372 machine_mode mode;
1373 rtx reg, mem;
1374 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1376 size = nelts = 0;
1377 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1378 if ((mode = apply_result_mode[regno]) != VOIDmode)
1380 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1381 if (size % align != 0)
1382 size = CEIL (size, align) * align;
1383 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1384 mem = adjust_address (result, mode, size);
1385 savevec[nelts++] = (savep
1386 ? gen_rtx_SET (mem, reg)
1387 : gen_rtx_SET (reg, mem));
1388 size += GET_MODE_SIZE (mode);
1390 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1393 /* Save the state required to perform an untyped call with the same
1394 arguments as were passed to the current function. */
1396 static rtx
1397 expand_builtin_apply_args_1 (void)
1399 rtx registers, tem;
1400 int size, align, regno;
1401 machine_mode mode;
1402 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1404 /* Create a block where the arg-pointer, structure value address,
1405 and argument registers can be saved. */
1406 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1408 /* Walk past the arg-pointer and structure value address. */
1409 size = GET_MODE_SIZE (Pmode);
1410 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1411 size += GET_MODE_SIZE (Pmode);
1413 /* Save each register used in calling a function to the block. */
1414 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1415 if ((mode = apply_args_mode[regno]) != VOIDmode)
1417 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1418 if (size % align != 0)
1419 size = CEIL (size, align) * align;
1421 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1423 emit_move_insn (adjust_address (registers, mode, size), tem);
1424 size += GET_MODE_SIZE (mode);
1427 /* Save the arg pointer to the block. */
1428 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1429   /* We need the pointer as the arguments were actually passed to us by the
 1430      caller, not as we might have pretended they were passed.  Make sure it's
 1431      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1432 if (STACK_GROWS_DOWNWARD)
1434 = force_operand (plus_constant (Pmode, tem,
1435 crtl->args.pretend_args_size),
1436 NULL_RTX);
1437 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1439 size = GET_MODE_SIZE (Pmode);
1441 /* Save the structure value address unless this is passed as an
1442 "invisible" first argument. */
1443 if (struct_incoming_value)
1445 emit_move_insn (adjust_address (registers, Pmode, size),
1446 copy_to_reg (struct_incoming_value));
1447 size += GET_MODE_SIZE (Pmode);
1450 /* Return the address of the block. */
1451 return copy_addr_to_reg (XEXP (registers, 0));
1454 /* __builtin_apply_args returns block of memory allocated on
1455 the stack into which is stored the arg pointer, structure
1456 value address, static chain, and all the registers that might
1457 possibly be used in performing a function call. The code is
1458 moved to the start of the function so the incoming values are
1459 saved. */
1461 static rtx
1462 expand_builtin_apply_args (void)
1464 /* Don't do __builtin_apply_args more than once in a function.
1465 Save the result of the first call and reuse it. */
1466 if (apply_args_value != 0)
1467 return apply_args_value;
1469 /* When this function is called, it means that registers must be
1470 saved on entry to this function. So we migrate the
1471 call to the first insn of this function. */
1472 rtx temp;
1474 start_sequence ();
1475 temp = expand_builtin_apply_args_1 ();
1476 rtx_insn *seq = get_insns ();
1477 end_sequence ();
1479 apply_args_value = temp;
1481 /* Put the insns after the NOTE that starts the function.
1482 If this is inside a start_sequence, make the outer-level insn
1483 chain current, so the code is placed at the start of the
1484 function. If internal_arg_pointer is a non-virtual pseudo,
1485 it needs to be placed after the function that initializes
1486 that pseudo. */
1487 push_topmost_sequence ();
1488 if (REG_P (crtl->args.internal_arg_pointer)
1489 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1490 emit_insn_before (seq, parm_birth_insn);
1491 else
1492 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1493 pop_topmost_sequence ();
1494 return temp;
1498 /* Perform an untyped call and save the state required to perform an
1499 untyped return of whatever value was returned by the given function. */
1501 static rtx
1502 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1504 int size, align, regno;
1505 machine_mode mode;
1506 rtx incoming_args, result, reg, dest, src;
1507 rtx_call_insn *call_insn;
1508 rtx old_stack_level = 0;
1509 rtx call_fusage = 0;
1510 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1512 arguments = convert_memory_address (Pmode, arguments);
1514 /* Create a block where the return registers can be saved. */
1515 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1517 /* Fetch the arg pointer from the ARGUMENTS block. */
1518 incoming_args = gen_reg_rtx (Pmode);
1519 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1520 if (!STACK_GROWS_DOWNWARD)
1521 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1522 incoming_args, 0, OPTAB_LIB_WIDEN);
1524 /* Push a new argument block and copy the arguments. Do not allow
1525 the (potential) memcpy call below to interfere with our stack
1526 manipulations. */
1527 do_pending_stack_adjust ();
1528 NO_DEFER_POP;
1530 /* Save the stack with nonlocal if available. */
1531 if (targetm.have_save_stack_nonlocal ())
1532 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1533 else
1534 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1536 /* Allocate a block of memory onto the stack and copy the memory
1537 arguments to the outgoing arguments address. We can pass TRUE
1538 as the 4th argument because we just saved the stack pointer
1539 and will restore it right after the call. */
1540 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1542 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1543 may have already set current_function_calls_alloca to true.
1544 current_function_calls_alloca won't be set if argsize is zero,
1545 so we have to guarantee need_drap is true here. */
1546 if (SUPPORTS_STACK_ALIGNMENT)
1547 crtl->need_drap = true;
1549 dest = virtual_outgoing_args_rtx;
1550 if (!STACK_GROWS_DOWNWARD)
1552 if (CONST_INT_P (argsize))
1553 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1554 else
1555 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1557 dest = gen_rtx_MEM (BLKmode, dest);
1558 set_mem_align (dest, PARM_BOUNDARY);
1559 src = gen_rtx_MEM (BLKmode, incoming_args);
1560 set_mem_align (src, PARM_BOUNDARY);
1561 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1563 /* Refer to the argument block. */
1564 apply_args_size ();
1565 arguments = gen_rtx_MEM (BLKmode, arguments);
1566 set_mem_align (arguments, PARM_BOUNDARY);
1568 /* Walk past the arg-pointer and structure value address. */
1569 size = GET_MODE_SIZE (Pmode);
1570 if (struct_value)
1571 size += GET_MODE_SIZE (Pmode);
1573 /* Restore each of the registers previously saved. Make USE insns
1574 for each of these registers for use in making the call. */
1575 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1576 if ((mode = apply_args_mode[regno]) != VOIDmode)
1578 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1579 if (size % align != 0)
1580 size = CEIL (size, align) * align;
1581 reg = gen_rtx_REG (mode, regno);
1582 emit_move_insn (reg, adjust_address (arguments, mode, size));
1583 use_reg (&call_fusage, reg);
1584 size += GET_MODE_SIZE (mode);
1587 /* Restore the structure value address unless this is passed as an
1588 "invisible" first argument. */
1589 size = GET_MODE_SIZE (Pmode);
1590 if (struct_value)
1592 rtx value = gen_reg_rtx (Pmode);
1593 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1594 emit_move_insn (struct_value, value);
1595 if (REG_P (struct_value))
1596 use_reg (&call_fusage, struct_value);
1597 size += GET_MODE_SIZE (Pmode);
1600 /* All arguments and registers used for the call are set up by now! */
1601 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1603 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1604 and we don't want to load it into a register as an optimization,
1605 because prepare_call_address already did it if it should be done. */
1606 if (GET_CODE (function) != SYMBOL_REF)
1607 function = memory_address (FUNCTION_MODE, function);
1609 /* Generate the actual call instruction and save the return value. */
1610 if (targetm.have_untyped_call ())
1612 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1613 emit_call_insn (targetm.gen_untyped_call (mem, result,
1614 result_vector (1, result)));
1616 else if (targetm.have_call_value ())
1618 rtx valreg = 0;
1620 /* Locate the unique return register. It is not possible to
1621 express a call that sets more than one return register using
1622 call_value; use untyped_call for that. In fact, untyped_call
1623 only needs to save the return registers in the given block. */
1624 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1625 if ((mode = apply_result_mode[regno]) != VOIDmode)
1627 gcc_assert (!valreg); /* have_untyped_call required. */
1629 valreg = gen_rtx_REG (mode, regno);
1632 emit_insn (targetm.gen_call_value (valreg,
1633 gen_rtx_MEM (FUNCTION_MODE, function),
1634 const0_rtx, NULL_RTX, const0_rtx));
1636 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1638 else
1639 gcc_unreachable ();
1641 /* Find the CALL insn we just emitted, and attach the register usage
1642 information. */
1643 call_insn = last_call_insn ();
1644 add_function_usage_to (call_insn, call_fusage);
1646 /* Restore the stack. */
1647 if (targetm.have_save_stack_nonlocal ())
1648 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1649 else
1650 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1651 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1653 OK_DEFER_POP;
1655 /* Return the address of the result block. */
1656 result = copy_addr_to_reg (XEXP (result, 0));
1657 return convert_memory_address (ptr_mode, result);
1660 /* Perform an untyped return. */
1662 static void
1663 expand_builtin_return (rtx result)
1665 int size, align, regno;
1666 machine_mode mode;
1667 rtx reg;
1668 rtx_insn *call_fusage = 0;
1670 result = convert_memory_address (Pmode, result);
1672 apply_result_size ();
1673 result = gen_rtx_MEM (BLKmode, result);
1675 if (targetm.have_untyped_return ())
1677 rtx vector = result_vector (0, result);
1678 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1679 emit_barrier ();
1680 return;
1683 /* Restore the return value and note that each value is used. */
1684 size = 0;
1685 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1686 if ((mode = apply_result_mode[regno]) != VOIDmode)
1688 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1689 if (size % align != 0)
1690 size = CEIL (size, align) * align;
1691 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1692 emit_move_insn (reg, adjust_address (result, mode, size));
1694 push_to_sequence (call_fusage);
1695 emit_use (reg);
1696 call_fusage = get_insns ();
1697 end_sequence ();
1698 size += GET_MODE_SIZE (mode);
1701 /* Put the USE insns before the return. */
1702 emit_insn (call_fusage);
1704 /* Return whatever values were restored by jumping directly to the end
1705 of the function. */
1706 expand_naked_return ();
1709 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1711 static enum type_class
1712 type_to_class (tree type)
1714 switch (TREE_CODE (type))
1716 case VOID_TYPE: return void_type_class;
1717 case INTEGER_TYPE: return integer_type_class;
1718 case ENUMERAL_TYPE: return enumeral_type_class;
1719 case BOOLEAN_TYPE: return boolean_type_class;
1720 case POINTER_TYPE: return pointer_type_class;
1721 case REFERENCE_TYPE: return reference_type_class;
1722 case OFFSET_TYPE: return offset_type_class;
1723 case REAL_TYPE: return real_type_class;
1724 case COMPLEX_TYPE: return complex_type_class;
1725 case FUNCTION_TYPE: return function_type_class;
1726 case METHOD_TYPE: return method_type_class;
1727 case RECORD_TYPE: return record_type_class;
1728 case UNION_TYPE:
1729 case QUAL_UNION_TYPE: return union_type_class;
1730 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1731 ? string_type_class : array_type_class);
1732 case LANG_TYPE: return lang_type_class;
1733 default: return no_type_class;
1737 /* Expand a call EXP to __builtin_classify_type. */
1739 static rtx
1740 expand_builtin_classify_type (tree exp)
1742 if (call_expr_nargs (exp))
1743 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1744 return GEN_INT (no_type_class);
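/* For illustration (a sketch, not a transcription of real output): a call
   such as

       int k = __builtin_classify_type (3.14);

   is classified by type_to_class above as real_type_class, since the
   argument has REAL_TYPE; a call that somehow carries no argument folds
   to no_type_class.  The numeric values of the type_class enumerators
   are defined in typeclass.h.  */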
1747 /* This helper macro, meant to be used in mathfn_built_in below,
1748 determines which among a set of three builtin math functions is
1749 appropriate for a given type mode. The `F' and `L' cases are
1750 automatically generated from the `double' case. */
1751 #define CASE_MATHFN(MATHFN) \
1752 CASE_CFN_##MATHFN: \
1753 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1754 fcodel = BUILT_IN_##MATHFN##L ; break;
1755 /* Similar to above, but appends _R after any F/L suffix. */
1756 #define CASE_MATHFN_REENT(MATHFN) \
1757 case CFN_BUILT_IN_##MATHFN##_R: \
1758 case CFN_BUILT_IN_##MATHFN##F_R: \
1759 case CFN_BUILT_IN_##MATHFN##L_R: \
1760 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1761 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1763 /* Return a function equivalent to FN but operating on floating-point
1764 values of type TYPE, or END_BUILTINS if no such function exists.
1765 This is purely an operation on function codes; it does not guarantee
1766 that the target actually has an implementation of the function. */
1768 static built_in_function
1769 mathfn_built_in_2 (tree type, combined_fn fn)
1771 built_in_function fcode, fcodef, fcodel;
1773 switch (fn)
1775 CASE_MATHFN (ACOS)
1776 CASE_MATHFN (ACOSH)
1777 CASE_MATHFN (ASIN)
1778 CASE_MATHFN (ASINH)
1779 CASE_MATHFN (ATAN)
1780 CASE_MATHFN (ATAN2)
1781 CASE_MATHFN (ATANH)
1782 CASE_MATHFN (CBRT)
1783 CASE_MATHFN (CEIL)
1784 CASE_MATHFN (CEXPI)
1785 CASE_MATHFN (COPYSIGN)
1786 CASE_MATHFN (COS)
1787 CASE_MATHFN (COSH)
1788 CASE_MATHFN (DREM)
1789 CASE_MATHFN (ERF)
1790 CASE_MATHFN (ERFC)
1791 CASE_MATHFN (EXP)
1792 CASE_MATHFN (EXP10)
1793 CASE_MATHFN (EXP2)
1794 CASE_MATHFN (EXPM1)
1795 CASE_MATHFN (FABS)
1796 CASE_MATHFN (FDIM)
1797 CASE_MATHFN (FLOOR)
1798 CASE_MATHFN (FMA)
1799 CASE_MATHFN (FMAX)
1800 CASE_MATHFN (FMIN)
1801 CASE_MATHFN (FMOD)
1802 CASE_MATHFN (FREXP)
1803 CASE_MATHFN (GAMMA)
1804 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1805 CASE_MATHFN (HUGE_VAL)
1806 CASE_MATHFN (HYPOT)
1807 CASE_MATHFN (ILOGB)
1808 CASE_MATHFN (ICEIL)
1809 CASE_MATHFN (IFLOOR)
1810 CASE_MATHFN (INF)
1811 CASE_MATHFN (IRINT)
1812 CASE_MATHFN (IROUND)
1813 CASE_MATHFN (ISINF)
1814 CASE_MATHFN (J0)
1815 CASE_MATHFN (J1)
1816 CASE_MATHFN (JN)
1817 CASE_MATHFN (LCEIL)
1818 CASE_MATHFN (LDEXP)
1819 CASE_MATHFN (LFLOOR)
1820 CASE_MATHFN (LGAMMA)
1821 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1822 CASE_MATHFN (LLCEIL)
1823 CASE_MATHFN (LLFLOOR)
1824 CASE_MATHFN (LLRINT)
1825 CASE_MATHFN (LLROUND)
1826 CASE_MATHFN (LOG)
1827 CASE_MATHFN (LOG10)
1828 CASE_MATHFN (LOG1P)
1829 CASE_MATHFN (LOG2)
1830 CASE_MATHFN (LOGB)
1831 CASE_MATHFN (LRINT)
1832 CASE_MATHFN (LROUND)
1833 CASE_MATHFN (MODF)
1834 CASE_MATHFN (NAN)
1835 CASE_MATHFN (NANS)
1836 CASE_MATHFN (NEARBYINT)
1837 CASE_MATHFN (NEXTAFTER)
1838 CASE_MATHFN (NEXTTOWARD)
1839 CASE_MATHFN (POW)
1840 CASE_MATHFN (POWI)
1841 CASE_MATHFN (POW10)
1842 CASE_MATHFN (REMAINDER)
1843 CASE_MATHFN (REMQUO)
1844 CASE_MATHFN (RINT)
1845 CASE_MATHFN (ROUND)
1846 CASE_MATHFN (SCALB)
1847 CASE_MATHFN (SCALBLN)
1848 CASE_MATHFN (SCALBN)
1849 CASE_MATHFN (SIGNBIT)
1850 CASE_MATHFN (SIGNIFICAND)
1851 CASE_MATHFN (SIN)
1852 CASE_MATHFN (SINCOS)
1853 CASE_MATHFN (SINH)
1854 CASE_MATHFN (SQRT)
1855 CASE_MATHFN (TAN)
1856 CASE_MATHFN (TANH)
1857 CASE_MATHFN (TGAMMA)
1858 CASE_MATHFN (TRUNC)
1859 CASE_MATHFN (Y0)
1860 CASE_MATHFN (Y1)
1861 CASE_MATHFN (YN)
1863 default:
1864 return END_BUILTINS;
1867 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1868 return fcode;
1869 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1870 return fcodef;
1871 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1872 return fcodel;
1873 else
1874 return END_BUILTINS;
1877 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1878 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1879 otherwise use the explicit declaration. If we can't do the conversion,
1880 return null. */
1882 static tree
1883 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1885 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1886 if (fcode2 == END_BUILTINS)
1887 return NULL_TREE;
1889 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1890 return NULL_TREE;
1892 return builtin_decl_explicit (fcode2);
1895 /* Like mathfn_built_in_1, but always use the implicit builtins. */
1897 tree
1898 mathfn_built_in (tree type, combined_fn fn)
1900 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1903 /* Like mathfn_built_in_1, but take a built_in_function and
1904 always use the implicit builtins. */
1906 tree
1907 mathfn_built_in (tree type, enum built_in_function fn)
1909 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
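/* A small usage sketch of the lookup above: assuming float_type_node is
   the type in question,

       tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   maps CFN_SQRT to BUILT_IN_SQRTF and returns its declaration, or
   NULL_TREE when that declaration is not implicitly available.  */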
1912 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1913 return its code; otherwise return IFN_LAST. Note that this function
1914 only tests whether the function is defined in internal-fn.def, not whether
1915 it is actually available on the target. */
1917 internal_fn
1918 associated_internal_fn (tree fndecl)
1920 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1921 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1922 switch (DECL_FUNCTION_CODE (fndecl))
1924 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1925 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1926 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1927 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1928 #include "internal-fn.def"
1930 CASE_FLT_FN (BUILT_IN_POW10):
1931 return IFN_EXP10;
1933 CASE_FLT_FN (BUILT_IN_DREM):
1934 return IFN_REMAINDER;
1936 CASE_FLT_FN (BUILT_IN_SCALBN):
1937 CASE_FLT_FN (BUILT_IN_SCALBLN):
1938 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1939 return IFN_LDEXP;
1940 return IFN_LAST;
1942 default:
1943 return IFN_LAST;
1947 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1948 on the current target by a call to an internal function, return the
1949 code of that internal function, otherwise return IFN_LAST. The caller
1950 is responsible for ensuring that any side-effects of the built-in
1951 call are dealt with correctly. E.g. if CALL sets errno, the caller
1952 must decide that the errno result isn't needed or make it available
1953 in some other way. */
1955 internal_fn
1956 replacement_internal_fn (gcall *call)
1958 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1960 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1961 if (ifn != IFN_LAST)
1963 tree_pair types = direct_internal_fn_types (ifn, call);
1964 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1965 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1966 return ifn;
1969 return IFN_LAST;
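/* Sketch of how the two helpers above are meant to be used together: for
   a GIMPLE call CALL to __builtin_sqrt,

       internal_fn ifn = replacement_internal_fn (call);
       if (ifn != IFN_LAST)
         ... rewrite the call to use IFN_SQRT ...

   IFN_SQRT is returned only when the corresponding optab is supported
   for the call's types in the optimization mode of the containing block;
   otherwise IFN_LAST is returned and the library call stays.  */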
1972 /* Expand a call to the builtin ternary math functions (fma).
1973 Return NULL_RTX if a normal call should be emitted rather than expanding the
1974 function in-line. EXP is the expression that is a call to the builtin
1975 function; if convenient, the result should be placed in TARGET.
1976 SUBTARGET may be used as the target for computing one of EXP's
1977 operands. */
1979 static rtx
1980 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1982 optab builtin_optab;
1983 rtx op0, op1, op2, result;
1984 rtx_insn *insns;
1985 tree fndecl = get_callee_fndecl (exp);
1986 tree arg0, arg1, arg2;
1987 machine_mode mode;
1989 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1990 return NULL_RTX;
1992 arg0 = CALL_EXPR_ARG (exp, 0);
1993 arg1 = CALL_EXPR_ARG (exp, 1);
1994 arg2 = CALL_EXPR_ARG (exp, 2);
1996 switch (DECL_FUNCTION_CODE (fndecl))
1998 CASE_FLT_FN (BUILT_IN_FMA):
1999 builtin_optab = fma_optab; break;
2000 default:
2001 gcc_unreachable ();
2004 /* Make a suitable register to place result in. */
2005 mode = TYPE_MODE (TREE_TYPE (exp));
2007 /* Before working hard, check whether the instruction is available. */
2008 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2009 return NULL_RTX;
2011 result = gen_reg_rtx (mode);
2013 /* Always stabilize the argument list. */
2014 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2015 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2016 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2018 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2019 op1 = expand_normal (arg1);
2020 op2 = expand_normal (arg2);
2022 start_sequence ();
2024 /* Compute into RESULT.
2025 Set RESULT to wherever the result comes back. */
2026 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2027 result, 0);
2029 /* If we were unable to expand via the builtin, stop the sequence
2030 (without outputting the insns) and call the library function
2031 with the stabilized argument list. */
2032 if (result == 0)
2034 end_sequence ();
2035 return expand_call (exp, target, target == const0_rtx);
2038 /* Output the entire sequence. */
2039 insns = get_insns ();
2040 end_sequence ();
2041 emit_insn (insns);
2043 return result;
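/* For example, on a target that provides an fma pattern for DFmode,

       double d = __builtin_fma (a, b, c);

   is expanded through fma_optab into a single fused multiply-add insn;
   if the pattern is missing, the code above returns NULL_RTX and an
   ordinary call to the fma library function is emitted instead.  */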
2046 /* Expand a call to the builtin sin and cos math functions.
2047 Return NULL_RTX if a normal call should be emitted rather than expanding the
2048 function in-line. EXP is the expression that is a call to the builtin
2049 function; if convenient, the result should be placed in TARGET.
2050 SUBTARGET may be used as the target for computing one of EXP's
2051 operands. */
2053 static rtx
2054 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2056 optab builtin_optab;
2057 rtx op0;
2058 rtx_insn *insns;
2059 tree fndecl = get_callee_fndecl (exp);
2060 machine_mode mode;
2061 tree arg;
2063 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2064 return NULL_RTX;
2066 arg = CALL_EXPR_ARG (exp, 0);
2068 switch (DECL_FUNCTION_CODE (fndecl))
2070 CASE_FLT_FN (BUILT_IN_SIN):
2071 CASE_FLT_FN (BUILT_IN_COS):
2072 builtin_optab = sincos_optab; break;
2073 default:
2074 gcc_unreachable ();
2077 /* Make a suitable register to place result in. */
2078 mode = TYPE_MODE (TREE_TYPE (exp));
2080 /* Check if the sincos insn is available; otherwise fall back
2081 to the sin or cos insn. */
2082 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2083 switch (DECL_FUNCTION_CODE (fndecl))
2085 CASE_FLT_FN (BUILT_IN_SIN):
2086 builtin_optab = sin_optab; break;
2087 CASE_FLT_FN (BUILT_IN_COS):
2088 builtin_optab = cos_optab; break;
2089 default:
2090 gcc_unreachable ();
2093 /* Before working hard, check whether the instruction is available. */
2094 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2096 rtx result = gen_reg_rtx (mode);
2098 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2099 need to expand the argument again. This way, we will not perform
2100 side-effects more than once. */
2101 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2103 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2105 start_sequence ();
2107 /* Compute into RESULT.
2108 Set RESULT to wherever the result comes back. */
2109 if (builtin_optab == sincos_optab)
2111 int ok;
2113 switch (DECL_FUNCTION_CODE (fndecl))
2115 CASE_FLT_FN (BUILT_IN_SIN):
2116 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2117 break;
2118 CASE_FLT_FN (BUILT_IN_COS):
2119 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2120 break;
2121 default:
2122 gcc_unreachable ();
2124 gcc_assert (ok);
2126 else
2127 result = expand_unop (mode, builtin_optab, op0, result, 0);
2129 if (result != 0)
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2133 end_sequence ();
2134 emit_insn (insns);
2135 return result;
2138 /* If we were unable to expand via the builtin, stop the sequence
2139 (without outputting the insns) and call the library function
2140 with the stabilized argument list. */
2141 end_sequence ();
2144 return expand_call (exp, target, target == const0_rtx);
2147 /* Given an interclass math builtin decl FNDECL and its argument ARG
2148 return an RTL instruction code that implements the functionality.
2149 If that isn't possible or available return CODE_FOR_nothing. */
2151 static enum insn_code
2152 interclass_mathfn_icode (tree arg, tree fndecl)
2154 bool errno_set = false;
2155 optab builtin_optab = unknown_optab;
2156 machine_mode mode;
2158 switch (DECL_FUNCTION_CODE (fndecl))
2160 CASE_FLT_FN (BUILT_IN_ILOGB):
2161 errno_set = true; builtin_optab = ilogb_optab; break;
2162 CASE_FLT_FN (BUILT_IN_ISINF):
2163 builtin_optab = isinf_optab; break;
2164 case BUILT_IN_ISNORMAL:
2165 case BUILT_IN_ISFINITE:
2166 CASE_FLT_FN (BUILT_IN_FINITE):
2167 case BUILT_IN_FINITED32:
2168 case BUILT_IN_FINITED64:
2169 case BUILT_IN_FINITED128:
2170 case BUILT_IN_ISINFD32:
2171 case BUILT_IN_ISINFD64:
2172 case BUILT_IN_ISINFD128:
2173 /* These builtins have no optabs (yet). */
2174 break;
2175 default:
2176 gcc_unreachable ();
2179 /* There's no easy way to detect the case we need to set EDOM. */
2180 if (flag_errno_math && errno_set)
2181 return CODE_FOR_nothing;
2183 /* Optab mode depends on the mode of the input argument. */
2184 mode = TYPE_MODE (TREE_TYPE (arg));
2186 if (builtin_optab)
2187 return optab_handler (builtin_optab, mode);
2188 return CODE_FOR_nothing;
2191 /* Expand a call to one of the builtin math functions that operate on
2192 a floating-point argument and produce an integer result (ilogb, isinf,
2193 isnan, etc).
2194 Return 0 if a normal call should be emitted rather than expanding the
2195 function in-line. EXP is the expression that is a call to the builtin
2196 function; if convenient, the result should be placed in TARGET. */
2198 static rtx
2199 expand_builtin_interclass_mathfn (tree exp, rtx target)
2201 enum insn_code icode = CODE_FOR_nothing;
2202 rtx op0;
2203 tree fndecl = get_callee_fndecl (exp);
2204 machine_mode mode;
2205 tree arg;
2207 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2208 return NULL_RTX;
2210 arg = CALL_EXPR_ARG (exp, 0);
2211 icode = interclass_mathfn_icode (arg, fndecl);
2212 mode = TYPE_MODE (TREE_TYPE (arg));
2214 if (icode != CODE_FOR_nothing)
2216 struct expand_operand ops[1];
2217 rtx_insn *last = get_last_insn ();
2218 tree orig_arg = arg;
2220 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2221 need to expand the argument again. This way, we will not perform
2222 side-effects more than once. */
2223 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2225 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2227 if (mode != GET_MODE (op0))
2228 op0 = convert_to_mode (mode, op0, 0);
2230 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2231 if (maybe_legitimize_operands (icode, 0, 1, ops)
2232 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2233 return ops[0].value;
2235 delete_insns_since (last);
2236 CALL_EXPR_ARG (exp, 0) = orig_arg;
2239 return NULL_RTX;
2242 /* Expand a call to the builtin sincos math function.
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2245 function. */
2247 static rtx
2248 expand_builtin_sincos (tree exp)
2250 rtx op0, op1, op2, target1, target2;
2251 machine_mode mode;
2252 tree arg, sinp, cosp;
2253 int result;
2254 location_t loc = EXPR_LOCATION (exp);
2255 tree alias_type, alias_off;
2257 if (!validate_arglist (exp, REAL_TYPE,
2258 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2259 return NULL_RTX;
2261 arg = CALL_EXPR_ARG (exp, 0);
2262 sinp = CALL_EXPR_ARG (exp, 1);
2263 cosp = CALL_EXPR_ARG (exp, 2);
2265 /* Make a suitable register to place result in. */
2266 mode = TYPE_MODE (TREE_TYPE (arg));
2268 /* Check if the sincos insn is available; otherwise emit the call. */
2269 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2270 return NULL_RTX;
2272 target1 = gen_reg_rtx (mode);
2273 target2 = gen_reg_rtx (mode);
2275 op0 = expand_normal (arg);
2276 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2277 alias_off = build_int_cst (alias_type, 0);
2278 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2279 sinp, alias_off));
2280 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2281 cosp, alias_off));
2283 /* Compute into target1 and target2.
2284 Set TARGET to wherever the result comes back. */
2285 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2286 gcc_assert (result);
2288 /* Move target1 and target2 to the memory locations indicated
2289 by op1 and op2. */
2290 emit_move_insn (op1, target1);
2291 emit_move_insn (op2, target2);
2293 return const0_rtx;
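/* Illustrative use of the expansion above:

       double s, c;
       sincos (x, &s, &c);

   On a target providing the sincos optab both values are computed by one
   insn and then stored through the SINP and COSP pointers; when the
   optab is missing, NULL_RTX is returned and the ordinary libc sincos
   call is emitted instead.  */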
2296 /* Expand a call to the internal cexpi builtin to the sincos math function.
2297 EXP is the expression that is a call to the builtin function; if convenient,
2298 the result should be placed in TARGET. */
2300 static rtx
2301 expand_builtin_cexpi (tree exp, rtx target)
2303 tree fndecl = get_callee_fndecl (exp);
2304 tree arg, type;
2305 machine_mode mode;
2306 rtx op0, op1, op2;
2307 location_t loc = EXPR_LOCATION (exp);
2309 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2310 return NULL_RTX;
2312 arg = CALL_EXPR_ARG (exp, 0);
2313 type = TREE_TYPE (arg);
2314 mode = TYPE_MODE (TREE_TYPE (arg));
2316 /* Try expanding via a sincos optab; fall back to emitting a libcall
2317 to sincos or cexp. One of those is sure to exist, because cexpi is
2318 only generated from sincos or cexp, or when either of them is available. */
2319 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2321 op1 = gen_reg_rtx (mode);
2322 op2 = gen_reg_rtx (mode);
2324 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2326 /* Compute into op1 and op2. */
2327 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2329 else if (targetm.libc_has_function (function_sincos))
2331 tree call, fn = NULL_TREE;
2332 tree top1, top2;
2333 rtx op1a, op2a;
2335 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2336 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2337 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2338 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2339 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2340 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2341 else
2342 gcc_unreachable ();
2344 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2345 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2346 op1a = copy_addr_to_reg (XEXP (op1, 0));
2347 op2a = copy_addr_to_reg (XEXP (op2, 0));
2348 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2349 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2351 /* Make sure not to fold the sincos call again. */
2352 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2353 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2354 call, 3, arg, top1, top2));
2356 else
2358 tree call, fn = NULL_TREE, narg;
2359 tree ctype = build_complex_type (type);
2361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2362 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2364 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2366 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2367 else
2368 gcc_unreachable ();
2370 /* If we don't have a decl for cexp, create one. This is the
2371 friendliest fallback if the user calls __builtin_cexpi on a
2372 target without full C99 function support. */
2373 if (fn == NULL_TREE)
2375 tree fntype;
2376 const char *name = NULL;
2378 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2379 name = "cexpf";
2380 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2381 name = "cexp";
2382 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2383 name = "cexpl";
2385 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2386 fn = build_fn_decl (name, fntype);
2389 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2390 build_real (type, dconst0), arg);
2392 /* Make sure not to fold the cexp call again. */
2393 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2394 return expand_expr (build_call_nary (ctype, call, 1, narg),
2395 target, VOIDmode, EXPAND_NORMAL);
2398 /* Now build the proper return type. */
2399 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2400 make_tree (TREE_TYPE (arg), op2),
2401 make_tree (TREE_TYPE (arg), op1)),
2402 target, VOIDmode, EXPAND_NORMAL);
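/* A sketch of the three strategies above for

       __complex__ double z = __builtin_cexpi (x);   that is, cos(x) + i*sin(x)

   1. The sincos optab exists: both parts are computed in registers.
   2. libc provides sincos: a sincos (x, &sin, &cos) call writes two
      stack temporaries and the complex result is assembled from them.
   3. Otherwise the call is rewritten as cexp (0.0 + x*i), creating a
      cexp/cexpf/cexpl declaration by name if none exists.  */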
2405 /* Conveniently construct a function call expression. FNDECL names the
2406 function to be called, N is the number of arguments, and the "..."
2407 parameters are the argument expressions. Unlike build_call_expr
2408 this doesn't fold the call, so it always returns a CALL_EXPR. */
2410 static tree
2411 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2413 va_list ap;
2414 tree fntype = TREE_TYPE (fndecl);
2415 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2417 va_start (ap, n);
2418 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2419 va_end (ap);
2420 SET_EXPR_LOCATION (fn, loc);
2421 return fn;
2424 /* Expand a call to one of the builtin rounding functions gcc defines
2425 as an extension (lfloor and lceil). As these are gcc extensions we
2426 do not need to worry about setting errno to EDOM.
2427 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2428 EXP is the expression that is a call to the builtin function;
2429 if convenient, the result should be placed in TARGET. */
2431 static rtx
2432 expand_builtin_int_roundingfn (tree exp, rtx target)
2434 convert_optab builtin_optab;
2435 rtx op0, tmp;
2436 rtx_insn *insns;
2437 tree fndecl = get_callee_fndecl (exp);
2438 enum built_in_function fallback_fn;
2439 tree fallback_fndecl;
2440 machine_mode mode;
2441 tree arg;
2443 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2444 gcc_unreachable ();
2446 arg = CALL_EXPR_ARG (exp, 0);
2448 switch (DECL_FUNCTION_CODE (fndecl))
2450 CASE_FLT_FN (BUILT_IN_ICEIL):
2451 CASE_FLT_FN (BUILT_IN_LCEIL):
2452 CASE_FLT_FN (BUILT_IN_LLCEIL):
2453 builtin_optab = lceil_optab;
2454 fallback_fn = BUILT_IN_CEIL;
2455 break;
2457 CASE_FLT_FN (BUILT_IN_IFLOOR):
2458 CASE_FLT_FN (BUILT_IN_LFLOOR):
2459 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2460 builtin_optab = lfloor_optab;
2461 fallback_fn = BUILT_IN_FLOOR;
2462 break;
2464 default:
2465 gcc_unreachable ();
2468 /* Make a suitable register to place result in. */
2469 mode = TYPE_MODE (TREE_TYPE (exp));
2471 target = gen_reg_rtx (mode);
2473 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2474 need to expand the argument again. This way, we will not perform
2475 side-effects more than once. */
2476 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2478 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2480 start_sequence ();
2482 /* Compute into TARGET. */
2483 if (expand_sfix_optab (target, op0, builtin_optab))
2485 /* Output the entire sequence. */
2486 insns = get_insns ();
2487 end_sequence ();
2488 emit_insn (insns);
2489 return target;
2492 /* If we were unable to expand via the builtin, stop the sequence
2493 (without outputting the insns). */
2494 end_sequence ();
2496 /* Fall back to floating point rounding optab. */
2497 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2499 /* For non-C99 targets we may end up without a fallback fndecl here
2500 if the user called __builtin_lfloor directly. In this case emit
2501 a call to the floor/ceil variants nevertheless. This should give
2502 the best user experience on targets without full C99 support. */
2503 if (fallback_fndecl == NULL_TREE)
2505 tree fntype;
2506 const char *name = NULL;
2508 switch (DECL_FUNCTION_CODE (fndecl))
2510 case BUILT_IN_ICEIL:
2511 case BUILT_IN_LCEIL:
2512 case BUILT_IN_LLCEIL:
2513 name = "ceil";
2514 break;
2515 case BUILT_IN_ICEILF:
2516 case BUILT_IN_LCEILF:
2517 case BUILT_IN_LLCEILF:
2518 name = "ceilf";
2519 break;
2520 case BUILT_IN_ICEILL:
2521 case BUILT_IN_LCEILL:
2522 case BUILT_IN_LLCEILL:
2523 name = "ceill";
2524 break;
2525 case BUILT_IN_IFLOOR:
2526 case BUILT_IN_LFLOOR:
2527 case BUILT_IN_LLFLOOR:
2528 name = "floor";
2529 break;
2530 case BUILT_IN_IFLOORF:
2531 case BUILT_IN_LFLOORF:
2532 case BUILT_IN_LLFLOORF:
2533 name = "floorf";
2534 break;
2535 case BUILT_IN_IFLOORL:
2536 case BUILT_IN_LFLOORL:
2537 case BUILT_IN_LLFLOORL:
2538 name = "floorl";
2539 break;
2540 default:
2541 gcc_unreachable ();
2544 fntype = build_function_type_list (TREE_TYPE (arg),
2545 TREE_TYPE (arg), NULL_TREE);
2546 fallback_fndecl = build_fn_decl (name, fntype);
2549 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2551 tmp = expand_normal (exp);
2552 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2554 /* Truncate the result of the floating-point optab to an integer
2555 via expand_fix (). */
2556 target = gen_reg_rtx (mode);
2557 expand_fix (target, tmp, 0);
2559 return target;
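/* Example of the paths above: for

       long l = __builtin_lfloor (d);

   the lfloor optab is tried first; when the target lacks it, the call is
   lowered to the equivalent of (long) floor (d), using the implicit
   floor declaration if there is one and otherwise a bare "floor" decl
   built on the spot, with the final truncation done by expand_fix.  */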
2562 /* Expand a call to one of the builtin math functions doing integer
2563 conversion (lrint).
2564 Return 0 if a normal call should be emitted rather than expanding the
2565 function in-line. EXP is the expression that is a call to the builtin
2566 function; if convenient, the result should be placed in TARGET. */
2568 static rtx
2569 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2571 convert_optab builtin_optab;
2572 rtx op0;
2573 rtx_insn *insns;
2574 tree fndecl = get_callee_fndecl (exp);
2575 tree arg;
2576 machine_mode mode;
2577 enum built_in_function fallback_fn = BUILT_IN_NONE;
2579 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2580 gcc_unreachable ();
2582 arg = CALL_EXPR_ARG (exp, 0);
2584 switch (DECL_FUNCTION_CODE (fndecl))
2586 CASE_FLT_FN (BUILT_IN_IRINT):
2587 fallback_fn = BUILT_IN_LRINT;
2588 gcc_fallthrough ();
2589 CASE_FLT_FN (BUILT_IN_LRINT):
2590 CASE_FLT_FN (BUILT_IN_LLRINT):
2591 builtin_optab = lrint_optab;
2592 break;
2594 CASE_FLT_FN (BUILT_IN_IROUND):
2595 fallback_fn = BUILT_IN_LROUND;
2596 gcc_fallthrough ();
2597 CASE_FLT_FN (BUILT_IN_LROUND):
2598 CASE_FLT_FN (BUILT_IN_LLROUND):
2599 builtin_optab = lround_optab;
2600 break;
2602 default:
2603 gcc_unreachable ();
2606 /* There's no easy way to detect the case we need to set EDOM. */
2607 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2608 return NULL_RTX;
2610 /* Make a suitable register to place result in. */
2611 mode = TYPE_MODE (TREE_TYPE (exp));
2613 /* There's no easy way to detect the case we need to set EDOM. */
2614 if (!flag_errno_math)
2616 rtx result = gen_reg_rtx (mode);
2618 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2619 need to expand the argument again. This way, we will not perform
2620 side-effects more than once. */
2621 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2623 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2625 start_sequence ();
2627 if (expand_sfix_optab (result, op0, builtin_optab))
2629 /* Output the entire sequence. */
2630 insns = get_insns ();
2631 end_sequence ();
2632 emit_insn (insns);
2633 return result;
2636 /* If we were unable to expand via the builtin, stop the sequence
2637 (without outputting the insns) and call the library function
2638 with the stabilized argument list. */
2639 end_sequence ();
2642 if (fallback_fn != BUILT_IN_NONE)
2644 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2645 targets, (int) round (x) should never be transformed into
2646 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2647 a call to lround in the hope that the target provides at least some
2648 C99 functions. This should give the best user experience on
2649 targets without full C99 support. */
2650 tree fallback_fndecl = mathfn_built_in_1
2651 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2653 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2654 fallback_fndecl, 1, arg);
2656 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2657 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2658 return convert_to_mode (mode, target, 0);
2661 return expand_call (exp, target, target == const0_rtx);
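/* Example of the fallback above (a sketch of the control flow): for

       int i = __builtin_iround (d);

   the lround optab is tried only when errno handling is disabled; if
   that fails, or with -fmath-errno, the call is lowered to lround (d)
   and the result is converted back to int rather than giving up.  */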
2664 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2665 a normal call should be emitted rather than expanding the function
2666 in-line. EXP is the expression that is a call to the builtin
2667 function; if convenient, the result should be placed in TARGET. */
2669 static rtx
2670 expand_builtin_powi (tree exp, rtx target)
2672 tree arg0, arg1;
2673 rtx op0, op1;
2674 machine_mode mode;
2675 machine_mode mode2;
2677 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2678 return NULL_RTX;
2680 arg0 = CALL_EXPR_ARG (exp, 0);
2681 arg1 = CALL_EXPR_ARG (exp, 1);
2682 mode = TYPE_MODE (TREE_TYPE (exp));
2684 /* Emit a libcall to libgcc. */
2686 /* Mode of the 2nd argument must match that of an int. */
2687 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2689 if (target == NULL_RTX)
2690 target = gen_reg_rtx (mode);
2692 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2693 if (GET_MODE (op0) != mode)
2694 op0 = convert_to_mode (mode, op0, 0);
2695 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2696 if (GET_MODE (op1) != mode2)
2697 op1 = convert_to_mode (mode2, op1, 0);
2699 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2700 target, LCT_CONST, mode, 2,
2701 op0, mode, op1, mode2);
2703 return target;
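/* The expansion above always goes through a libgcc libcall.  For
   instance

       double y = __builtin_powi (x, n);

   becomes a call to the powi libfunc for DFmode (conventionally
   __powidf2), with the exponent first converted to the mode of int.  */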
2706 /* Expand expression EXP, which is a call to the strlen builtin. Return
2707 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2708 try to get the result in TARGET, if convenient. */
2710 static rtx
2711 expand_builtin_strlen (tree exp, rtx target,
2712 machine_mode target_mode)
2714 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2715 return NULL_RTX;
2716 else
2718 struct expand_operand ops[4];
2719 rtx pat;
2720 tree len;
2721 tree src = CALL_EXPR_ARG (exp, 0);
2722 rtx src_reg;
2723 rtx_insn *before_strlen;
2724 machine_mode insn_mode = target_mode;
2725 enum insn_code icode = CODE_FOR_nothing;
2726 unsigned int align;
2728 /* If the length can be computed at compile-time, return it. */
2729 len = c_strlen (src, 0);
2730 if (len)
2731 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2733 /* If the length can be computed at compile-time and is a constant
2734 integer, but there are side-effects in src, evaluate
2735 src for side-effects, then return len.
2736 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2737 can be optimized into: i++; x = 3; */
2738 len = c_strlen (src, 1);
2739 if (len && TREE_CODE (len) == INTEGER_CST)
2741 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2742 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2745 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2747 /* If SRC is not a pointer type, don't do this operation inline. */
2748 if (align == 0)
2749 return NULL_RTX;
2751 /* Bail out if we can't compute strlen in the right mode. */
2752 while (insn_mode != VOIDmode)
2754 icode = optab_handler (strlen_optab, insn_mode);
2755 if (icode != CODE_FOR_nothing)
2756 break;
2758 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2760 if (insn_mode == VOIDmode)
2761 return NULL_RTX;
2763 /* Make a place to hold the source address. We will not expand
2764 the actual source until we are sure that the expansion will
2765 not fail -- there are trees that cannot be expanded twice. */
2766 src_reg = gen_reg_rtx (Pmode);
2768 /* Mark the beginning of the strlen sequence so we can emit the
2769 source operand later. */
2770 before_strlen = get_last_insn ();
2772 create_output_operand (&ops[0], target, insn_mode);
2773 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2774 create_integer_operand (&ops[2], 0);
2775 create_integer_operand (&ops[3], align);
2776 if (!maybe_expand_insn (icode, 4, ops))
2777 return NULL_RTX;
2779 /* Now that we are assured of success, expand the source. */
2780 start_sequence ();
2781 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2782 if (pat != src_reg)
2784 #ifdef POINTERS_EXTEND_UNSIGNED
2785 if (GET_MODE (pat) != Pmode)
2786 pat = convert_to_mode (Pmode, pat,
2787 POINTERS_EXTEND_UNSIGNED);
2788 #endif
2789 emit_move_insn (src_reg, pat);
2791 pat = get_insns ();
2792 end_sequence ();
2794 if (before_strlen)
2795 emit_insn_after (pat, before_strlen);
2796 else
2797 emit_insn_before (pat, get_insns ());
2799 /* Return the value in the proper mode for this function. */
2800 if (GET_MODE (ops[0].value) == target_mode)
2801 target = ops[0].value;
2802 else if (target != 0)
2803 convert_move (target, ops[0].value, 0);
2804 else
2805 target = convert_to_mode (target_mode, ops[0].value, 0);
2807 return target;
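/* Two illustrative cases handled above:

       size_t a = strlen ("hello");   folds to the constant 5
       size_t b = strlen (p);         expands via the strlen optab

   The first goes through c_strlen; the second applies only when the
   pointer alignment is known and the target provides a strlen insn,
   otherwise NULL_RTX is returned and a library call is emitted.  */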
2811 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2812 bytes from constant string DATA + OFFSET and return it as target
2813 constant. */
2815 static rtx
2816 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2817 machine_mode mode)
2819 const char *str = (const char *) data;
2821 gcc_assert (offset >= 0
2822 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2823 <= strlen (str) + 1));
2825 return c_readstr (str + offset, mode);
2828 /* LEN specifies the length of the block for the memcpy/memset operation.
2829 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2830 In some cases we can make a very likely guess about the maximum size,
2831 which we then store in PROBABLE_MAX_SIZE. */
2833 static void
2834 determine_block_size (tree len, rtx len_rtx,
2835 unsigned HOST_WIDE_INT *min_size,
2836 unsigned HOST_WIDE_INT *max_size,
2837 unsigned HOST_WIDE_INT *probable_max_size)
2839 if (CONST_INT_P (len_rtx))
2841 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2842 return;
2844 else
2846 wide_int min, max;
2847 enum value_range_type range_type = VR_UNDEFINED;
2849 /* Determine bounds from the type. */
2850 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2851 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2852 else
2853 *min_size = 0;
2854 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2855 *probable_max_size = *max_size
2856 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2857 else
2858 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2860 if (TREE_CODE (len) == SSA_NAME)
2861 range_type = get_range_info (len, &min, &max);
2862 if (range_type == VR_RANGE)
2864 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2865 *min_size = min.to_uhwi ();
2866 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2867 *probable_max_size = *max_size = max.to_uhwi ();
2869 else if (range_type == VR_ANTI_RANGE)
2871 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
2872 if (min == 0)
2874 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2875 *min_size = max.to_uhwi () + 1;
2877 /* Code like
2879 int n;
2880 if (n < 100)
2881 memcpy (a, b, n)
2883 produces an anti-range allowing negative values of N. We can
2884 still use that information and guess that N is not negative. */
2886 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2887 *probable_max_size = min.to_uhwi () - 1;
2890 gcc_checking_assert (*max_size <=
2891 (unsigned HOST_WIDE_INT)
2892 GET_MODE_MASK (GET_MODE (len_rtx)));
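/* A worked example of the range logic above (a sketch):

       void f (unsigned int n, char *a, char *b)
       {
         if (n < 100)
           memcpy (a, b, n);
       }

   Value-range information on the SSA name for N gives VR_RANGE [0, 99],
   so MIN_SIZE stays 0 while MAX_SIZE and PROBABLE_MAX_SIZE drop from
   the type's maximum to 99, which lets the block-move expander choose a
   small inline sequence.  */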
2895 /* Helper function to do the actual work for expand_builtin_memcpy. */
2897 static rtx
2898 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2900 const char *src_str;
2901 unsigned int src_align = get_pointer_alignment (src);
2902 unsigned int dest_align = get_pointer_alignment (dest);
2903 rtx dest_mem, src_mem, dest_addr, len_rtx;
2904 HOST_WIDE_INT expected_size = -1;
2905 unsigned int expected_align = 0;
2906 unsigned HOST_WIDE_INT min_size;
2907 unsigned HOST_WIDE_INT max_size;
2908 unsigned HOST_WIDE_INT probable_max_size;
2910 /* If DEST is not a pointer type, call the normal function. */
2911 if (dest_align == 0)
2912 return NULL_RTX;
2914 /* If SRC is not a pointer type, don't do this
2915 operation in-line. */
2916 if (src_align == 0)
2917 return NULL_RTX;
2919 if (currently_expanding_gimple_stmt)
2920 stringop_block_profile (currently_expanding_gimple_stmt,
2921 &expected_align, &expected_size);
2923 if (expected_align < dest_align)
2924 expected_align = dest_align;
2925 dest_mem = get_memory_rtx (dest, len);
2926 set_mem_align (dest_mem, dest_align);
2927 len_rtx = expand_normal (len);
2928 determine_block_size (len, len_rtx, &min_size, &max_size,
2929 &probable_max_size);
2930 src_str = c_getstr (src);
2932 /* If SRC is a string constant and block move would be done
2933 by pieces, we can avoid loading the string from memory
2934 and only store the computed constants. */
2935 if (src_str
2936 && CONST_INT_P (len_rtx)
2937 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2938 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2939 CONST_CAST (char *, src_str),
2940 dest_align, false))
2942 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2943 builtin_memcpy_read_str,
2944 CONST_CAST (char *, src_str),
2945 dest_align, false, 0);
2946 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2947 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2948 return dest_mem;
2951 src_mem = get_memory_rtx (src, len);
2952 set_mem_align (src_mem, src_align);
2954 /* Copy word part most expediently. */
2955 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2956 CALL_EXPR_TAILCALL (exp)
2957 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2958 expected_align, expected_size,
2959 min_size, max_size, probable_max_size);
2961 if (dest_addr == 0)
2963 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2964 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2967 return dest_addr;
2970 /* Expand a call EXP to the memcpy builtin.
2971 Return NULL_RTX if we failed; the caller should emit a normal call.
2972 Otherwise try to get the result in TARGET, if convenient (and in
2973 mode MODE if that's convenient). */
2975 static rtx
2976 expand_builtin_memcpy (tree exp, rtx target)
2978 if (!validate_arglist (exp,
2979 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2980 return NULL_RTX;
2981 else
2983 tree dest = CALL_EXPR_ARG (exp, 0);
2984 tree src = CALL_EXPR_ARG (exp, 1);
2985 tree len = CALL_EXPR_ARG (exp, 2);
2986 return expand_builtin_memcpy_args (dest, src, len, target, exp);
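/* Example of the by-pieces path in expand_builtin_memcpy_args: for

       char buf[4];
       memcpy (buf, "hi!", 4);

   the source is a string constant and the constant length fits within
   it, so (alignment permitting) the bytes are written directly via
   store_by_pieces instead of emitting a block move or a libcall.  */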
2990 /* Expand an instrumented call EXP to the memcpy builtin.
2991 Return NULL_RTX if we failed; the caller should emit a normal call.
2992 Otherwise try to get the result in TARGET, if convenient (and in
2993 mode MODE if that's convenient). */
2995 static rtx
2996 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2998 if (!validate_arglist (exp,
2999 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3000 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3001 INTEGER_TYPE, VOID_TYPE))
3002 return NULL_RTX;
3003 else
3005 tree dest = CALL_EXPR_ARG (exp, 0);
3006 tree src = CALL_EXPR_ARG (exp, 2);
3007 tree len = CALL_EXPR_ARG (exp, 4);
3008 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3010 /* Return src bounds with the result. */
3011 if (res)
3013 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3014 expand_normal (CALL_EXPR_ARG (exp, 1)));
3015 res = chkp_join_splitted_slot (res, bnd);
3017 return res;
3021 /* Expand a call EXP to the mempcpy builtin.
3022 Return NULL_RTX if we failed; the caller should emit a normal call.
3023 Otherwise try to get the result in TARGET, if convenient (and in
3024 mode MODE if that's convenient). If ENDP is 0 return the
3025 destination pointer, if ENDP is 1 return the end pointer ala
3026 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3027 stpcpy. */
3029 static rtx
3030 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3032 if (!validate_arglist (exp,
3033 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3034 return NULL_RTX;
3035 else
3037 tree dest = CALL_EXPR_ARG (exp, 0);
3038 tree src = CALL_EXPR_ARG (exp, 1);
3039 tree len = CALL_EXPR_ARG (exp, 2);
3040 return expand_builtin_mempcpy_args (dest, src, len,
3041 target, mode, /*endp=*/ 1,
3042 exp);
3046 /* Expand an instrumented call EXP to the mempcpy builtin.
3047 Return NULL_RTX if we failed; the caller should emit a normal call.
3048 Otherwise try to get the result in TARGET, if convenient (and in
3049 mode MODE if that's convenient). */
3051 static rtx
3052 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3054 if (!validate_arglist (exp,
3055 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3056 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3057 INTEGER_TYPE, VOID_TYPE))
3058 return NULL_RTX;
3059 else
3061 tree dest = CALL_EXPR_ARG (exp, 0);
3062 tree src = CALL_EXPR_ARG (exp, 2);
3063 tree len = CALL_EXPR_ARG (exp, 4);
3064 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3065 mode, 1, exp);
3067 /* Return src bounds with the result. */
3068 if (res)
3070 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3071 expand_normal (CALL_EXPR_ARG (exp, 1)));
3072 res = chkp_join_splitted_slot (res, bnd);
3074 return res;
3078 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3079 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3080 so that this can also be called without constructing an actual CALL_EXPR.
3081 The other arguments and return value are the same as for
3082 expand_builtin_mempcpy. */
3084 static rtx
3085 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3086 rtx target, machine_mode mode, int endp,
3087 tree orig_exp)
3089 tree fndecl = get_callee_fndecl (orig_exp);
3091 /* If return value is ignored, transform mempcpy into memcpy. */
3092 if (target == const0_rtx
3093 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3094 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3096 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3097 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3098 dest, src, len);
3099 return expand_expr (result, target, mode, EXPAND_NORMAL);
3101 else if (target == const0_rtx
3102 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3104 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3105 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3106 dest, src, len);
3107 return expand_expr (result, target, mode, EXPAND_NORMAL);
3109 else
3111 const char *src_str;
3112 unsigned int src_align = get_pointer_alignment (src);
3113 unsigned int dest_align = get_pointer_alignment (dest);
3114 rtx dest_mem, src_mem, len_rtx;
3116 /* If either SRC or DEST is not a pointer type, don't do this
3117 operation in-line. */
3118 if (dest_align == 0 || src_align == 0)
3119 return NULL_RTX;
3121 /* If LEN is not constant, call the normal function. */
3122 if (! tree_fits_uhwi_p (len))
3123 return NULL_RTX;
3125 len_rtx = expand_normal (len);
3126 src_str = c_getstr (src);
3128 /* If SRC is a string constant and block move would be done
3129 by pieces, we can avoid loading the string from memory
3130 and only store the computed constants. */
3131 if (src_str
3132 && CONST_INT_P (len_rtx)
3133 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3134 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3135 CONST_CAST (char *, src_str),
3136 dest_align, false))
3138 dest_mem = get_memory_rtx (dest, len);
3139 set_mem_align (dest_mem, dest_align);
3140 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3141 builtin_memcpy_read_str,
3142 CONST_CAST (char *, src_str),
3143 dest_align, false, endp);
3144 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3145 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3146 return dest_mem;
3149 if (CONST_INT_P (len_rtx)
3150 && can_move_by_pieces (INTVAL (len_rtx),
3151 MIN (dest_align, src_align)))
3153 dest_mem = get_memory_rtx (dest, len);
3154 set_mem_align (dest_mem, dest_align);
3155 src_mem = get_memory_rtx (src, len);
3156 set_mem_align (src_mem, src_align);
3157 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3158 MIN (dest_align, src_align), endp);
3159 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3160 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3161 return dest_mem;
3164 return NULL_RTX;
3168 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3169 we failed; the caller should emit a normal call. Otherwise try to
3170 get the result in TARGET, if convenient. If ENDP is 0 return the
3171 destination pointer, if ENDP is 1 return the end pointer ala
3172 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3173 stpcpy. */
3175 static rtx
3176 expand_movstr (tree dest, tree src, rtx target, int endp)
3178 struct expand_operand ops[3];
3179 rtx dest_mem;
3180 rtx src_mem;
3182 if (!targetm.have_movstr ())
3183 return NULL_RTX;
3185 dest_mem = get_memory_rtx (dest, NULL);
3186 src_mem = get_memory_rtx (src, NULL);
3187 if (!endp)
3189 target = force_reg (Pmode, XEXP (dest_mem, 0));
3190 dest_mem = replace_equiv_address (dest_mem, target);
3193 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3194 create_fixed_operand (&ops[1], dest_mem);
3195 create_fixed_operand (&ops[2], src_mem);
3196 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3197 return NULL_RTX;
3199 if (endp && target != const0_rtx)
3201 target = ops[0].value;
3202 /* movstr is supposed to set end to the address of the NUL
3203 terminator. If the caller requested a mempcpy-like return value,
3204 adjust it. */
3205 if (endp == 1)
3207 rtx tem = plus_constant (GET_MODE (target),
3208 gen_lowpart (GET_MODE (target), target), 1);
3209 emit_move_insn (target, force_operand (tem, NULL_RTX));
3212 return target;
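/* The ENDP convention used above and by the callers below, sketched:

       strcpy (d, s)       ENDP == 0   returns D
       mempcpy (d, s, n)   ENDP == 1   returns D + N
       stpcpy (d, s)       ENDP == 2   returns D + strlen (S)

   movstr itself sets its output operand to the address of the NUL
   terminator, so only the mempcpy-style result needs the +1 fixup.  */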
3215 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3216 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3217 try to get the result in TARGET, if convenient (and in mode MODE if that's
3218 convenient). */
3220 static rtx
3221 expand_builtin_strcpy (tree exp, rtx target)
3223 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3225 tree dest = CALL_EXPR_ARG (exp, 0);
3226 tree src = CALL_EXPR_ARG (exp, 1);
3227 return expand_builtin_strcpy_args (dest, src, target);
3229 return NULL_RTX;
3232 /* Helper function to do the actual work for expand_builtin_strcpy. The
3233 arguments to the builtin_strcpy call DEST and SRC are broken out
3234 so that this can also be called without constructing an actual CALL_EXPR.
3235 The other arguments and return value are the same as for
3236 expand_builtin_strcpy. */
3238 static rtx
3239 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3241 return expand_movstr (dest, src, target, /*endp=*/0);
3244 /* Expand a call EXP to the stpcpy builtin.
3245 Return NULL_RTX if we failed; the caller should emit a normal call.
3246 Otherwise try to get the result in TARGET, if convenient (and in
3247 mode MODE if that's convenient). */
3249 static rtx
3250 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3252 tree dst, src;
3253 location_t loc = EXPR_LOCATION (exp);
3255 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3256 return NULL_RTX;
3258 dst = CALL_EXPR_ARG (exp, 0);
3259 src = CALL_EXPR_ARG (exp, 1);
3261 /* If return value is ignored, transform stpcpy into strcpy. */
3262 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3264 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3265 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3266 return expand_expr (result, target, mode, EXPAND_NORMAL);
3268 else
3270 tree len, lenp1;
3271 rtx ret;
3273 /* Ensure we get an actual string whose length can be evaluated at
3274 compile-time, not an expression containing a string. This is
3275 because the latter will potentially produce pessimized code
3276 when used to produce the return value. */
3277 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3278 return expand_movstr (dst, src, target, /*endp=*/2);
3280 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3281 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3282 target, mode, /*endp=*/2,
3283 exp);
3285 if (ret)
3286 return ret;
3288 if (TREE_CODE (len) == INTEGER_CST)
3290 rtx len_rtx = expand_normal (len);
3292 if (CONST_INT_P (len_rtx))
3294 ret = expand_builtin_strcpy_args (dst, src, target);
3296 if (ret)
3298 if (! target)
3300 if (mode != VOIDmode)
3301 target = gen_reg_rtx (mode);
3302 else
3303 target = gen_reg_rtx (GET_MODE (ret));
3305 if (GET_MODE (target) != GET_MODE (ret))
3306 ret = gen_lowpart (GET_MODE (target), ret);
3308 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3309 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3310 gcc_assert (ret);
3312 return target;
3317 return expand_movstr (dst, src, target, /*endp=*/2);
3321 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3322 bytes from constant string DATA + OFFSET and return it as target
3323 constant. */
3325 static rtx
3326 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3327 machine_mode mode)
3329 const char *str = (const char *) data;
3331 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3332 return const0_rtx;
3334 return c_readstr (str + offset, mode);
3337 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3338 NULL_RTX if we failed; the caller should emit a normal call. */
3340 static rtx
3341 expand_builtin_strncpy (tree exp, rtx target)
3343 location_t loc = EXPR_LOCATION (exp);
3345 if (validate_arglist (exp,
3346 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3348 tree dest = CALL_EXPR_ARG (exp, 0);
3349 tree src = CALL_EXPR_ARG (exp, 1);
3350 tree len = CALL_EXPR_ARG (exp, 2);
3351 tree slen = c_strlen (src, 1);
3353 /* We must be passed a constant len and src parameter. */
3354 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3355 return NULL_RTX;
3357 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3359 /* We're required to pad with trailing zeros if the requested
3360 len is greater than strlen(s2)+1. In that case try to
3361 use store_by_pieces; if that fails, punt. */
3362 if (tree_int_cst_lt (slen, len))
3364 unsigned int dest_align = get_pointer_alignment (dest);
3365 const char *p = c_getstr (src);
3366 rtx dest_mem;
3368 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3369 || !can_store_by_pieces (tree_to_uhwi (len),
3370 builtin_strncpy_read_str,
3371 CONST_CAST (char *, p),
3372 dest_align, false))
3373 return NULL_RTX;
3375 dest_mem = get_memory_rtx (dest, len);
3376 store_by_pieces (dest_mem, tree_to_uhwi (len),
3377 builtin_strncpy_read_str,
3378 CONST_CAST (char *, p), dest_align, false, 0);
3379 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3380 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3381 return dest_mem;
3384 return NULL_RTX;
3387 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3388 bytes from constant string DATA + OFFSET and return it as target
3389 constant. */
3391 static rtx
3392 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3393 machine_mode mode)
3395 const char *c = (const char *) data;
3396 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3398 memset (p, *c, GET_MODE_SIZE (mode));
3400 return c_readstr (p, mode);
3403 /* Callback routine for store_by_pieces. Return the RTL of a register
3404 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3405 char value given in the RTL register data. For example, if mode is
3406 4 bytes wide, return the RTL for 0x01010101*data. */
3408 static rtx
3409 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3410 machine_mode mode)
3412 rtx target, coeff;
3413 size_t size;
3414 char *p;
3416 size = GET_MODE_SIZE (mode);
3417 if (size == 1)
3418 return (rtx) data;
3420 p = XALLOCAVEC (char, size);
3421 memset (p, 1, size);
3422 coeff = c_readstr (p, mode);
3424 target = convert_to_mode (mode, (rtx) data, 1);
3425 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3426 return force_reg (mode, target);
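/* Illustration of the replication trick above: for a 4-byte MODE and an
   unsigned char value C held in a register, the generated RTL is
   equivalent to

       (C zero-extended to MODE) * 0x01010101

   so every byte of the result holds a copy of C, ready to be stored
   word by word by store_by_pieces.  */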
3429 /* Expand expression EXP, which is a call to the memset builtin. Return
3430 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3431 try to get the result in TARGET, if convenient (and in mode MODE if that's
3432 convenient). */
3434 static rtx
3435 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3437 if (!validate_arglist (exp,
3438 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3439 return NULL_RTX;
3440 else
3442 tree dest = CALL_EXPR_ARG (exp, 0);
3443 tree val = CALL_EXPR_ARG (exp, 1);
3444 tree len = CALL_EXPR_ARG (exp, 2);
3445 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3449 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3450 Return NULL_RTX if we failed; the caller should emit a normal call. Otherwise
3451 try to get the result in TARGET, if convenient (and in mode MODE if that's
3452 convenient). */
3454 static rtx
3455 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3457 if (!validate_arglist (exp,
3458 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3459 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3460 return NULL_RTX;
3461 else
3463 tree dest = CALL_EXPR_ARG (exp, 0);
3464 tree val = CALL_EXPR_ARG (exp, 2);
3465 tree len = CALL_EXPR_ARG (exp, 3);
3466 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3468 /* Return src bounds with the result. */
3469 if (res)
3471 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3472 expand_normal (CALL_EXPR_ARG (exp, 1)));
3473 res = chkp_join_splitted_slot (res, bnd);
3475 return res;
3479 /* Helper function to do the actual work for expand_builtin_memset. The
3480 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3481 so that this can also be called without constructing an actual CALL_EXPR.
3482 The other arguments and return value are the same as for
3483 expand_builtin_memset. */
3485 static rtx
3486 expand_builtin_memset_args (tree dest, tree val, tree len,
3487 rtx target, machine_mode mode, tree orig_exp)
3489 tree fndecl, fn;
3490 enum built_in_function fcode;
3491 machine_mode val_mode;
3492 char c;
3493 unsigned int dest_align;
3494 rtx dest_mem, dest_addr, len_rtx;
3495 HOST_WIDE_INT expected_size = -1;
3496 unsigned int expected_align = 0;
3497 unsigned HOST_WIDE_INT min_size;
3498 unsigned HOST_WIDE_INT max_size;
3499 unsigned HOST_WIDE_INT probable_max_size;
3501 dest_align = get_pointer_alignment (dest);
3503 /* If DEST is not a pointer type, don't do this operation in-line. */
3504 if (dest_align == 0)
3505 return NULL_RTX;
3507 if (currently_expanding_gimple_stmt)
3508 stringop_block_profile (currently_expanding_gimple_stmt,
3509 &expected_align, &expected_size);
3511 if (expected_align < dest_align)
3512 expected_align = dest_align;
3514 /* If the LEN parameter is zero, return DEST. */
3515 if (integer_zerop (len))
3517 /* Evaluate and ignore VAL in case it has side-effects. */
3518 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3519 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3522 /* Stabilize the arguments in case we fail. */
3523 dest = builtin_save_expr (dest);
3524 val = builtin_save_expr (val);
3525 len = builtin_save_expr (len);
3527 len_rtx = expand_normal (len);
3528 determine_block_size (len, len_rtx, &min_size, &max_size,
3529 &probable_max_size);
3530 dest_mem = get_memory_rtx (dest, len);
3531 val_mode = TYPE_MODE (unsigned_char_type_node);
3533 if (TREE_CODE (val) != INTEGER_CST)
3535 rtx val_rtx;
3537 val_rtx = expand_normal (val);
3538 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3540 /* Assume that we can memset by pieces if we can store
3541 * the coefficients by pieces (in the required modes).
3542 * We can't pass builtin_memset_gen_str as that emits RTL. */
3543 c = 1;
3544 if (tree_fits_uhwi_p (len)
3545 && can_store_by_pieces (tree_to_uhwi (len),
3546 builtin_memset_read_str, &c, dest_align,
3547 true))
3549 val_rtx = force_reg (val_mode, val_rtx);
3550 store_by_pieces (dest_mem, tree_to_uhwi (len),
3551 builtin_memset_gen_str, val_rtx, dest_align,
3552 true, 0);
3554 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3555 dest_align, expected_align,
3556 expected_size, min_size, max_size,
3557 probable_max_size))
3558 goto do_libcall;
3560 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3561 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3562 return dest_mem;
3565 if (target_char_cast (val, &c))
3566 goto do_libcall;
3568 if (c)
3570 if (tree_fits_uhwi_p (len)
3571 && can_store_by_pieces (tree_to_uhwi (len),
3572 builtin_memset_read_str, &c, dest_align,
3573 true))
3574 store_by_pieces (dest_mem, tree_to_uhwi (len),
3575 builtin_memset_read_str, &c, dest_align, true, 0);
3576 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3577 gen_int_mode (c, val_mode),
3578 dest_align, expected_align,
3579 expected_size, min_size, max_size,
3580 probable_max_size))
3581 goto do_libcall;
3583 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3584 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3585 return dest_mem;
3588 set_mem_align (dest_mem, dest_align);
3589 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3590 CALL_EXPR_TAILCALL (orig_exp)
3591 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3592 expected_align, expected_size,
3593 min_size, max_size,
3594 probable_max_size);
3596 if (dest_addr == 0)
3598 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3599 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3602 return dest_addr;
3604 do_libcall:
3605 fndecl = get_callee_fndecl (orig_exp);
3606 fcode = DECL_FUNCTION_CODE (fndecl);
3607 if (fcode == BUILT_IN_MEMSET
3608 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3609 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3610 dest, val, len);
3611 else if (fcode == BUILT_IN_BZERO)
3612 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3613 dest, len);
3614 else
3615 gcc_unreachable ();
3616 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3617 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3618 return expand_call (fn, target, target == const0_rtx);
3621 /* Expand expression EXP, which is a call to the bzero builtin. Return
3622 NULL_RTX if we failed and the caller should emit a normal call. */
3624 static rtx
3625 expand_builtin_bzero (tree exp)
3627 tree dest, size;
3628 location_t loc = EXPR_LOCATION (exp);
3630 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3631 return NULL_RTX;
3633 dest = CALL_EXPR_ARG (exp, 0);
3634 size = CALL_EXPR_ARG (exp, 1);
3636 /* New argument list transforming bzero(ptr x, int y) to
3637 memset(ptr x, int 0, size_t y). This is done this way
3638 so that if it isn't expanded inline, we fall back to
3639 calling bzero instead of memset. */
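/* For example, bzero (buf, n) is expanded here as the equivalent of
   memset (buf, 0, (size_t) n); if the inline expansion fails, the
   do_libcall path in expand_builtin_memset_args still emits a call to
   bzero, because ORIG_EXP is the original bzero CALL_EXPR.  */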
3641 return expand_builtin_memset_args (dest, integer_zero_node,
3642 fold_convert_loc (loc,
3643 size_type_node, size),
3644 const0_rtx, VOIDmode, exp);
3647 /* Try to expand cmpstr operation ICODE with the given operands.
3648 Return the result rtx on success, otherwise return null. */
3650 static rtx
3651 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3652 HOST_WIDE_INT align)
3654 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3656 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3657 target = NULL_RTX;
3659 struct expand_operand ops[4];
3660 create_output_operand (&ops[0], target, insn_mode);
3661 create_fixed_operand (&ops[1], arg1_rtx);
3662 create_fixed_operand (&ops[2], arg2_rtx);
3663 create_integer_operand (&ops[3], align);
3664 if (maybe_expand_insn (icode, 4, ops))
3665 return ops[0].value;
3666 return NULL_RTX;
3669 /* Expand expression EXP, which is a call to the memcmp built-in function.
3670 Return NULL_RTX if we failed and the caller should emit a normal call,
3671 otherwise try to get the result in TARGET, if convenient.
3672 RESULT_EQ is true if we can relax the returned value to be either zero
3673 or nonzero, without caring about the sign. */
3675 static rtx
3676 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
3678 if (!validate_arglist (exp,
3679 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3680 return NULL_RTX;
3682 tree arg1 = CALL_EXPR_ARG (exp, 0);
3683 tree arg2 = CALL_EXPR_ARG (exp, 1);
3684 tree len = CALL_EXPR_ARG (exp, 2);
3685 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3686 location_t loc = EXPR_LOCATION (exp);
3688 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3689 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3691 /* If we don't have POINTER_TYPE, call the function. */
3692 if (arg1_align == 0 || arg2_align == 0)
3693 return NULL_RTX;
3695 rtx arg1_rtx = get_memory_rtx (arg1, len);
3696 rtx arg2_rtx = get_memory_rtx (arg2, len);
3697 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3699 /* Set MEM_SIZE as appropriate. */
3700 if (CONST_INT_P (len_rtx))
3702 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3703 set_mem_size (arg2_rtx, INTVAL (len_rtx));
3706 by_pieces_constfn constfn = NULL;
3708 const char *src_str = c_getstr (arg2);
3709 if (result_eq && src_str == NULL)
3711 src_str = c_getstr (arg1);
3712 if (src_str != NULL)
3713 std::swap (arg1_rtx, arg2_rtx);
3716 /* If SRC is a string constant and block move would be done
3717 by pieces, we can avoid loading the string from memory
3718 and only store the computed constants. */
3719 if (src_str
3720 && CONST_INT_P (len_rtx)
3721 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3722 constfn = builtin_memcpy_read_str;
3724 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3725 TREE_TYPE (len), target,
3726 result_eq, constfn,
3727 CONST_CAST (char *, src_str));
3729 if (result)
3731 /* Return the value in the proper mode for this function. */
3732 if (GET_MODE (result) == mode)
3733 return result;
3735 if (target != 0)
3737 convert_move (target, result, 0);
3738 return target;
3741 return convert_to_mode (mode, result, 0);
3744 return NULL_RTX;
3747 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3748 if we failed and the caller should emit a normal call, otherwise try to get
3749 the result in TARGET, if convenient. */
3751 static rtx
3752 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3754 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3755 return NULL_RTX;
3757 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3758 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3759 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3761 rtx arg1_rtx, arg2_rtx;
3762 tree fndecl, fn;
3763 tree arg1 = CALL_EXPR_ARG (exp, 0);
3764 tree arg2 = CALL_EXPR_ARG (exp, 1);
3765 rtx result = NULL_RTX;
3767 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3768 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3770 /* If we don't have POINTER_TYPE, call the function. */
3771 if (arg1_align == 0 || arg2_align == 0)
3772 return NULL_RTX;
3774 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3775 arg1 = builtin_save_expr (arg1);
3776 arg2 = builtin_save_expr (arg2);
3778 arg1_rtx = get_memory_rtx (arg1, NULL);
3779 arg2_rtx = get_memory_rtx (arg2, NULL);
3781 /* Try to call cmpstrsi. */
3782 if (cmpstr_icode != CODE_FOR_nothing)
3783 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3784 MIN (arg1_align, arg2_align));
3786 /* Try to determine at least one length and call cmpstrnsi. */
3787 if (!result && cmpstrn_icode != CODE_FOR_nothing)
3789 tree len;
3790 rtx arg3_rtx;
3792 tree len1 = c_strlen (arg1, 1);
3793 tree len2 = c_strlen (arg2, 1);
3795 if (len1)
3796 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3797 if (len2)
3798 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3800 /* If we don't have a constant length for the first, use the length
3801 of the second, if we know it. We don't require a constant for
3802 this case; some cost analysis could be done if both are available
3803 but neither is constant. For now, assume they're equally cheap,
3804 unless one has side effects. If both strings have constant lengths,
3805 use the smaller. */
3807 if (!len1)
3808 len = len2;
3809 else if (!len2)
3810 len = len1;
3811 else if (TREE_SIDE_EFFECTS (len1))
3812 len = len2;
3813 else if (TREE_SIDE_EFFECTS (len2))
3814 len = len1;
3815 else if (TREE_CODE (len1) != INTEGER_CST)
3816 len = len2;
3817 else if (TREE_CODE (len2) != INTEGER_CST)
3818 len = len1;
3819 else if (tree_int_cst_lt (len1, len2))
3820 len = len1;
3821 else
3822 len = len2;
3824 /* If both arguments have side effects, we cannot optimize. */
3825 if (len && !TREE_SIDE_EFFECTS (len))
3827 arg3_rtx = expand_normal (len);
3828 result = expand_cmpstrn_or_cmpmem
3829 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3830 arg3_rtx, MIN (arg1_align, arg2_align));
3834 if (result)
3836 /* Return the value in the proper mode for this function. */
3837 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3838 if (GET_MODE (result) == mode)
3839 return result;
3840 if (target == 0)
3841 return convert_to_mode (mode, result, 0);
3842 convert_move (target, result, 0);
3843 return target;
3846 /* Expand the library call ourselves using a stabilized argument
3847 list to avoid re-evaluating the function's arguments twice. */
3848 fndecl = get_callee_fndecl (exp);
3849 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3850 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3851 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3852 return expand_call (fn, target, target == const0_rtx);
3854 return NULL_RTX;
3857 /* Expand expression EXP, which is a call to the strncmp builtin. Return
3858 NULL_RTX if we failed and the caller should emit a normal call, otherwise try to get
3859 the result in TARGET, if convenient. */
3861 static rtx
3862 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3863 ATTRIBUTE_UNUSED machine_mode mode)
3865 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3867 if (!validate_arglist (exp,
3868 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3869 return NULL_RTX;
3871 /* If c_strlen can determine an expression for one of the string
3872 lengths, and it doesn't have side effects, then emit cmpstrnsi
3873 using length MIN(strlen(string)+1, arg3). */
3874 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3875 if (cmpstrn_icode != CODE_FOR_nothing)
3877 tree len, len1, len2;
3878 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3879 rtx result;
3880 tree fndecl, fn;
3881 tree arg1 = CALL_EXPR_ARG (exp, 0);
3882 tree arg2 = CALL_EXPR_ARG (exp, 1);
3883 tree arg3 = CALL_EXPR_ARG (exp, 2);
3885 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3886 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3888 len1 = c_strlen (arg1, 1);
3889 len2 = c_strlen (arg2, 1);
3891 if (len1)
3892 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3893 if (len2)
3894 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3896 /* If we don't have a constant length for the first, use the length
3897 of the second, if we know it. We don't require a constant for
3898 this case; some cost analysis could be done if both are available
3899 but neither is constant. For now, assume they're equally cheap,
3900 unless one has side effects. If both strings have constant lengths,
3901 use the smaller. */
3903 if (!len1)
3904 len = len2;
3905 else if (!len2)
3906 len = len1;
3907 else if (TREE_SIDE_EFFECTS (len1))
3908 len = len2;
3909 else if (TREE_SIDE_EFFECTS (len2))
3910 len = len1;
3911 else if (TREE_CODE (len1) != INTEGER_CST)
3912 len = len2;
3913 else if (TREE_CODE (len2) != INTEGER_CST)
3914 len = len1;
3915 else if (tree_int_cst_lt (len1, len2))
3916 len = len1;
3917 else
3918 len = len2;
3920 /* If both arguments have side effects, we cannot optimize. */
3921 if (!len || TREE_SIDE_EFFECTS (len))
3922 return NULL_RTX;
3924 /* The actual new length parameter is MIN(len,arg3). */
3925 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3926 fold_convert_loc (loc, TREE_TYPE (len), arg3));
3928 /* If we don't have POINTER_TYPE, call the function. */
3929 if (arg1_align == 0 || arg2_align == 0)
3930 return NULL_RTX;
3932 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3933 arg1 = builtin_save_expr (arg1);
3934 arg2 = builtin_save_expr (arg2);
3935 len = builtin_save_expr (len);
3937 arg1_rtx = get_memory_rtx (arg1, len);
3938 arg2_rtx = get_memory_rtx (arg2, len);
3939 arg3_rtx = expand_normal (len);
3940 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3941 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3942 MIN (arg1_align, arg2_align));
3943 if (result)
3945 /* Return the value in the proper mode for this function. */
3946 mode = TYPE_MODE (TREE_TYPE (exp));
3947 if (GET_MODE (result) == mode)
3948 return result;
3949 if (target == 0)
3950 return convert_to_mode (mode, result, 0);
3951 convert_move (target, result, 0);
3952 return target;
3955 /* Expand the library call ourselves using a stabilized argument
3956 list to avoid re-evaluating the function's arguments twice. */
3957 fndecl = get_callee_fndecl (exp);
3958 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3959 arg1, arg2, len);
3960 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3961 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3962 return expand_call (fn, target, target == const0_rtx);
3964 return NULL_RTX;
3967 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3968 if that's convenient. */
3970 rtx
3971 expand_builtin_saveregs (void)
3973 rtx val;
3974 rtx_insn *seq;
3976 /* Don't do __builtin_saveregs more than once in a function.
3977 Save the result of the first call and reuse it. */
3978 if (saveregs_value != 0)
3979 return saveregs_value;
3981 /* When this function is called, it means that registers must be
3982 saved on entry to this function. So we migrate the call to the
3983 first insn of this function. */
3985 start_sequence ();
3987 /* Do whatever the machine needs done in this case. */
3988 val = targetm.calls.expand_builtin_saveregs ();
3990 seq = get_insns ();
3991 end_sequence ();
3993 saveregs_value = val;
3995 /* Put the insns after the NOTE that starts the function. If this
3996 is inside a start_sequence, make the outer-level insn chain current, so
3997 the code is placed at the start of the function. */
3998 push_topmost_sequence ();
3999 emit_insn_after (seq, entry_of_function ());
4000 pop_topmost_sequence ();
4002 return val;
4005 /* Expand a call to __builtin_next_arg. */
4007 static rtx
4008 expand_builtin_next_arg (void)
4010 /* Checking arguments is already done in fold_builtin_next_arg
4011 that must be called before this function. */
4012 return expand_binop (ptr_mode, add_optab,
4013 crtl->args.internal_arg_pointer,
4014 crtl->args.arg_offset_rtx,
4015 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4018 /* Make it easier for the backends by protecting the valist argument
4019 from multiple evaluations. */
4021 static tree
4022 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4024 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4026 /* The current way of determining the type of valist is completely
4027 bogus. We should have the information on the va builtin instead. */
4028 if (!vatype)
4029 vatype = targetm.fn_abi_va_list (cfun->decl);
4031 if (TREE_CODE (vatype) == ARRAY_TYPE)
4033 if (TREE_SIDE_EFFECTS (valist))
4034 valist = save_expr (valist);
4036 /* For this case, the backends will be expecting a pointer to
4037 vatype, but it's possible we've actually been given an array
4038 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4039 So fix it. */
4040 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4042 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4043 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4046 else
4048 tree pt = build_pointer_type (vatype);
4050 if (! needs_lvalue)
4052 if (! TREE_SIDE_EFFECTS (valist))
4053 return valist;
4055 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4056 TREE_SIDE_EFFECTS (valist) = 1;
4059 if (TREE_SIDE_EFFECTS (valist))
4060 valist = save_expr (valist);
4061 valist = fold_build2_loc (loc, MEM_REF,
4062 vatype, valist, build_int_cst (pt, 0));
4065 return valist;
4068 /* The "standard" definition of va_list is void*. */
4070 tree
4071 std_build_builtin_va_list (void)
4073 return ptr_type_node;
4076 /* The "standard" abi va_list is va_list_type_node. */
4078 tree
4079 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4081 return va_list_type_node;
4084 /* The "standard" type of va_list is va_list_type_node. */
4086 tree
4087 std_canonical_va_list_type (tree type)
4089 tree wtype, htype;
4091 wtype = va_list_type_node;
4092 htype = type;
4093 /* Treat structure va_list types. */
4094 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4095 htype = TREE_TYPE (htype);
4096 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4098 /* If va_list is an array type, the argument may have decayed
4099 to a pointer type, e.g. by being passed to another function.
4100 In that case, unwrap both types so that we can compare the
4101 underlying records. */
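/* For example, on x86_64 va_list is the one-element array type
   __va_list_tag[1]; an argument of that type decays to __va_list_tag *,
   so both sides are unwrapped before the comparison below.  */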
4102 if (TREE_CODE (htype) == ARRAY_TYPE
4103 || POINTER_TYPE_P (htype))
4105 wtype = TREE_TYPE (wtype);
4106 htype = TREE_TYPE (htype);
4109 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4110 return va_list_type_node;
4112 return NULL_TREE;
4115 /* The "standard" implementation of va_start: just assign `nextarg' to
4116 the variable. */
4118 void
4119 std_expand_builtin_va_start (tree valist, rtx nextarg)
4121 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4122 convert_move (va_r, nextarg, 0);
4124 /* We do not have any valid bounds for the pointer, so
4125 just store zero bounds for it. */
4126 if (chkp_function_instrumented_p (current_function_decl))
4127 chkp_expand_bounds_reset_for_mem (valist,
4128 make_tree (TREE_TYPE (valist),
4129 nextarg));
4132 /* Expand EXP, a call to __builtin_va_start. */
4134 static rtx
4135 expand_builtin_va_start (tree exp)
4137 rtx nextarg;
4138 tree valist;
4139 location_t loc = EXPR_LOCATION (exp);
4141 if (call_expr_nargs (exp) < 2)
4143 error_at (loc, "too few arguments to function %<va_start%>");
4144 return const0_rtx;
4147 if (fold_builtin_next_arg (exp, true))
4148 return const0_rtx;
4150 nextarg = expand_builtin_next_arg ();
4151 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4153 if (targetm.expand_builtin_va_start)
4154 targetm.expand_builtin_va_start (valist, nextarg);
4155 else
4156 std_expand_builtin_va_start (valist, nextarg);
4158 return const0_rtx;
4161 /* Expand EXP, a call to __builtin_va_end. */
4163 static rtx
4164 expand_builtin_va_end (tree exp)
4166 tree valist = CALL_EXPR_ARG (exp, 0);
4168 /* Evaluate for side effects, if needed. I hate macros that don't
4169 do that. */
4170 if (TREE_SIDE_EFFECTS (valist))
4171 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4173 return const0_rtx;
4176 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4177 builtin rather than just as an assignment in stdarg.h because of the
4178 nastiness of array-type va_list types. */
4180 static rtx
4181 expand_builtin_va_copy (tree exp)
4183 tree dst, src, t;
4184 location_t loc = EXPR_LOCATION (exp);
4186 dst = CALL_EXPR_ARG (exp, 0);
4187 src = CALL_EXPR_ARG (exp, 1);
4189 dst = stabilize_va_list_loc (loc, dst, 1);
4190 src = stabilize_va_list_loc (loc, src, 0);
4192 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4194 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4196 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4200 else
4202 rtx dstb, srcb, size;
4204 /* Evaluate to pointers. */
4205 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4206 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4207 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4208 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4210 dstb = convert_memory_address (Pmode, dstb);
4211 srcb = convert_memory_address (Pmode, srcb);
4213 /* "Dereference" to BLKmode memories. */
4214 dstb = gen_rtx_MEM (BLKmode, dstb);
4215 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4216 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4217 srcb = gen_rtx_MEM (BLKmode, srcb);
4218 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4219 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4221 /* Copy. */
4222 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4225 return const0_rtx;
4228 /* Expand a call to one of the builtin functions __builtin_frame_address or
4229 __builtin_return_address. */
4231 static rtx
4232 expand_builtin_frame_address (tree fndecl, tree exp)
4234 /* The argument must be a nonnegative integer constant.
4235 It counts the number of frames to scan up the stack.
4236 The value is either the frame pointer value or the return
4237 address saved in that frame. */
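/* For example, __builtin_return_address (0) yields the return address of
   the current function and __builtin_frame_address (0) its frame pointer;
   any nonzero count is diagnosed with -Wframe-address further below.  */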
4238 if (call_expr_nargs (exp) == 0)
4239 /* Warning about missing arg was already issued. */
4240 return const0_rtx;
4241 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4243 error ("invalid argument to %qD", fndecl);
4244 return const0_rtx;
4246 else
4248 /* Number of frames to scan up the stack. */
4249 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4251 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4253 /* Some ports cannot access arbitrary stack frames. */
4254 if (tem == NULL)
4256 warning (0, "unsupported argument to %qD", fndecl);
4257 return const0_rtx;
4260 if (count)
4262 /* Warn since no effort is made to ensure that any frame
4263 beyond the current one exists or can be safely reached. */
4264 warning (OPT_Wframe_address, "calling %qD with "
4265 "a nonzero argument is unsafe", fndecl);
4268 /* For __builtin_frame_address, return what we've got. */
4269 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4270 return tem;
4272 if (!REG_P (tem)
4273 && ! CONSTANT_P (tem))
4274 tem = copy_addr_to_reg (tem);
4275 return tem;
4279 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4280 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4281 is the same as for allocate_dynamic_stack_space. */
4283 static rtx
4284 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4286 rtx op0;
4287 rtx result;
4288 bool valid_arglist;
4289 unsigned int align;
4290 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4291 == BUILT_IN_ALLOCA_WITH_ALIGN);
4293 valid_arglist
4294 = (alloca_with_align
4295 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4296 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4298 if (!valid_arglist)
4299 return NULL_RTX;
4301 /* Compute the argument. */
4302 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4304 /* Compute the alignment. */
4305 align = (alloca_with_align
4306 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4307 : BIGGEST_ALIGNMENT);
4309 /* Allocate the desired space. */
4310 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4311 result = convert_memory_address (ptr_mode, result);
4313 return result;
4316 /* Expand a call to bswap builtin in EXP.
4317 Return NULL_RTX if a normal call should be emitted rather than expanding the
4318 function in-line. If convenient, the result should be placed in TARGET.
4319 SUBTARGET may be used as the target for computing one of EXP's operands. */
4321 static rtx
4322 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4323 rtx subtarget)
4325 tree arg;
4326 rtx op0;
4328 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4329 return NULL_RTX;
4331 arg = CALL_EXPR_ARG (exp, 0);
4332 op0 = expand_expr (arg,
4333 subtarget && GET_MODE (subtarget) == target_mode
4334 ? subtarget : NULL_RTX,
4335 target_mode, EXPAND_NORMAL);
4336 if (GET_MODE (op0) != target_mode)
4337 op0 = convert_to_mode (target_mode, op0, 1);
4339 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4341 gcc_assert (target);
4343 return convert_to_mode (target_mode, target, 1);
4346 /* Expand a call to a unary builtin in EXP.
4347 Return NULL_RTX if a normal call should be emitted rather than expanding the
4348 function in-line. If convenient, the result should be placed in TARGET.
4349 SUBTARGET may be used as the target for computing one of EXP's operands. */
4351 static rtx
4352 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4353 rtx subtarget, optab op_optab)
4355 rtx op0;
4357 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4358 return NULL_RTX;
4360 /* Compute the argument. */
4361 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4362 (subtarget
4363 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4364 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4365 VOIDmode, EXPAND_NORMAL);
4366 /* Compute op, into TARGET if possible.
4367 Set TARGET to wherever the result comes back. */
4368 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4369 op_optab, op0, target, op_optab != clrsb_optab);
4370 gcc_assert (target);
4372 return convert_to_mode (target_mode, target, 0);
4375 /* Expand a call to __builtin_expect. We just return our argument
4376 as the builtin_expect semantics should already have been applied by
4377 the tree branch prediction pass. */
4379 static rtx
4380 expand_builtin_expect (tree exp, rtx target)
4382 tree arg;
4384 if (call_expr_nargs (exp) < 2)
4385 return const0_rtx;
4386 arg = CALL_EXPR_ARG (exp, 0);
4388 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4389 /* When guessing was done, the hints should be already stripped away. */
4390 gcc_assert (!flag_guess_branch_prob
4391 || optimize == 0 || seen_error ());
4392 return target;
4395 /* Expand a call to __builtin_assume_aligned. We just return our first
4396 argument as the builtin_assume_aligned semantics should already have
4397 been applied by CCP. */
4399 static rtx
4400 expand_builtin_assume_aligned (tree exp, rtx target)
4402 if (call_expr_nargs (exp) < 2)
4403 return const0_rtx;
4404 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4405 EXPAND_NORMAL);
4406 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4407 && (call_expr_nargs (exp) < 3
4408 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4409 return target;
4412 void
4413 expand_builtin_trap (void)
4415 if (targetm.have_trap ())
4417 rtx_insn *insn = emit_insn (targetm.gen_trap ());
4418 /* For trap insns when not accumulating outgoing args force
4419 REG_ARGS_SIZE note to prevent crossjumping of calls with
4420 different args sizes. */
4421 if (!ACCUMULATE_OUTGOING_ARGS)
4422 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4424 else
4426 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4427 tree call_expr = build_call_expr (fn, 0);
4428 expand_call (call_expr, NULL_RTX, false);
4431 emit_barrier ();
4434 /* Expand a call to __builtin_unreachable. We do nothing except emit
4435 a barrier saying that control flow will not pass here.
4437 It is the responsibility of the program being compiled to ensure
4438 that control flow never reaches __builtin_unreachable. */
4439 static void
4440 expand_builtin_unreachable (void)
4442 emit_barrier ();
4445 /* Expand EXP, a call to fabs, fabsf or fabsl.
4446 Return NULL_RTX if a normal call should be emitted rather than expanding
4447 the function inline. If convenient, the result should be placed
4448 in TARGET. SUBTARGET may be used as the target for computing
4449 the operand. */
4451 static rtx
4452 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4454 machine_mode mode;
4455 tree arg;
4456 rtx op0;
4458 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4459 return NULL_RTX;
4461 arg = CALL_EXPR_ARG (exp, 0);
4462 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4463 mode = TYPE_MODE (TREE_TYPE (arg));
4464 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4465 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4468 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4469 Return NULL if a normal call should be emitted rather than expanding the
4470 function inline. If convenient, the result should be placed in TARGET.
4471 SUBTARGET may be used as the target for computing the operand. */
4473 static rtx
4474 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4476 rtx op0, op1;
4477 tree arg;
4479 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4480 return NULL_RTX;
4482 arg = CALL_EXPR_ARG (exp, 0);
4483 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4485 arg = CALL_EXPR_ARG (exp, 1);
4486 op1 = expand_normal (arg);
4488 return expand_copysign (op0, op1, target);
4491 /* Expand a call to __builtin___clear_cache. */
4493 static rtx
4494 expand_builtin___clear_cache (tree exp)
4496 if (!targetm.code_for_clear_cache)
4498 #ifdef CLEAR_INSN_CACHE
4499 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4500 does something. Just do the default expansion to a call to
4501 __clear_cache(). */
4502 return NULL_RTX;
4503 #else
4504 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4505 does nothing. There is no need to call it. Do nothing. */
4506 return const0_rtx;
4507 #endif /* CLEAR_INSN_CACHE */
4510 /* We have a "clear_cache" insn, and it will handle everything. */
4511 tree begin, end;
4512 rtx begin_rtx, end_rtx;
4514 /* We must not expand to a library call. If we did, any
4515 fallback library function in libgcc that might contain a call to
4516 __builtin___clear_cache() would recurse infinitely. */
4517 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4519 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4520 return const0_rtx;
4523 if (targetm.have_clear_cache ())
4525 struct expand_operand ops[2];
4527 begin = CALL_EXPR_ARG (exp, 0);
4528 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4530 end = CALL_EXPR_ARG (exp, 1);
4531 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4533 create_address_operand (&ops[0], begin_rtx);
4534 create_address_operand (&ops[1], end_rtx);
4535 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4536 return const0_rtx;
4538 return const0_rtx;
4541 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
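/* The rounding below is the usual (ADDR + ALIGN - 1) & -ALIGN sequence,
   with ALIGN = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; e.g. for a 16-byte
   alignment an address of 0x1003 is rounded up to 0x1010.  */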
4543 static rtx
4544 round_trampoline_addr (rtx tramp)
4546 rtx temp, addend, mask;
4548 /* If we don't need too much alignment, we'll have been guaranteed
4549 proper alignment by get_trampoline_type. */
4550 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4551 return tramp;
4553 /* Round address up to desired boundary. */
4554 temp = gen_reg_rtx (Pmode);
4555 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4556 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4558 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4559 temp, 0, OPTAB_LIB_WIDEN);
4560 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4561 temp, 0, OPTAB_LIB_WIDEN);
4563 return tramp;
4566 static rtx
4567 expand_builtin_init_trampoline (tree exp, bool onstack)
4569 tree t_tramp, t_func, t_chain;
4570 rtx m_tramp, r_tramp, r_chain, tmp;
4572 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4573 POINTER_TYPE, VOID_TYPE))
4574 return NULL_RTX;
4576 t_tramp = CALL_EXPR_ARG (exp, 0);
4577 t_func = CALL_EXPR_ARG (exp, 1);
4578 t_chain = CALL_EXPR_ARG (exp, 2);
4580 r_tramp = expand_normal (t_tramp);
4581 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4582 MEM_NOTRAP_P (m_tramp) = 1;
4584 /* If ONSTACK, the TRAMP argument should be the address of a field
4585 within the local function's FRAME decl. Either way, let's see if
4586 we can fill in the MEM_ATTRs for this memory. */
4587 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4588 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4590 /* Creator of a heap trampoline is responsible for making sure the
4591 address is aligned to at least STACK_BOUNDARY. Normally malloc
4592 will ensure this anyhow. */
4593 tmp = round_trampoline_addr (r_tramp);
4594 if (tmp != r_tramp)
4596 m_tramp = change_address (m_tramp, BLKmode, tmp);
4597 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4598 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4601 /* The FUNC argument should be the address of the nested function.
4602 Extract the actual function decl to pass to the hook. */
4603 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4604 t_func = TREE_OPERAND (t_func, 0);
4605 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4607 r_chain = expand_normal (t_chain);
4609 /* Generate insns to initialize the trampoline. */
4610 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4612 if (onstack)
4614 trampolines_created = 1;
4616 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4617 "trampoline generated for nested function %qD", t_func);
4620 return const0_rtx;
4623 static rtx
4624 expand_builtin_adjust_trampoline (tree exp)
4626 rtx tramp;
4628 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
4631 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4632 tramp = round_trampoline_addr (tramp);
4633 if (targetm.calls.trampoline_adjust_address)
4634 tramp = targetm.calls.trampoline_adjust_address (tramp);
4636 return tramp;
4639 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4640 function. The function first checks whether the back end provides
4641 an insn to implement signbit for the respective mode. If not, it
4642 checks whether the floating point format of the value is such that
4643 the sign bit can be extracted. If that is not the case, error out.
4644 EXP is the expression that is a call to the builtin function; if
4645 convenient, the result should be placed in TARGET. */
4646 static rtx
4647 expand_builtin_signbit (tree exp, rtx target)
4649 const struct real_format *fmt;
4650 machine_mode fmode, imode, rmode;
4651 tree arg;
4652 int word, bitpos;
4653 enum insn_code icode;
4654 rtx temp;
4655 location_t loc = EXPR_LOCATION (exp);
4657 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4658 return NULL_RTX;
4660 arg = CALL_EXPR_ARG (exp, 0);
4661 fmode = TYPE_MODE (TREE_TYPE (arg));
4662 rmode = TYPE_MODE (TREE_TYPE (exp));
4663 fmt = REAL_MODE_FORMAT (fmode);
4665 arg = builtin_save_expr (arg);
4667 /* Expand the argument yielding a RTX expression. */
4668 temp = expand_normal (arg);
4670 /* Check if the back end provides an insn that handles signbit for the
4671 argument's mode. */
4672 icode = optab_handler (signbit_optab, fmode);
4673 if (icode != CODE_FOR_nothing)
4675 rtx_insn *last = get_last_insn ();
4676 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4677 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4678 return target;
4679 delete_insns_since (last);
4682 /* For floating point formats without a sign bit, implement signbit
4683 as "ARG < 0.0". */
4684 bitpos = fmt->signbit_ro;
4685 if (bitpos < 0)
4687 /* But we can't do this if the format supports signed zero. */
4688 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4690 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4691 build_real (TREE_TYPE (arg), dconst0));
4692 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4695 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4697 imode = int_mode_for_mode (fmode);
4698 gcc_assert (imode != BLKmode);
4699 temp = gen_lowpart (imode, temp);
4701 else
4703 imode = word_mode;
4704 /* Handle targets with different FP word orders. */
4705 if (FLOAT_WORDS_BIG_ENDIAN)
4706 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4707 else
4708 word = bitpos / BITS_PER_WORD;
4709 temp = operand_subword_force (temp, word, fmode);
4710 bitpos = bitpos % BITS_PER_WORD;
4713 /* Force the intermediate word_mode (or narrower) result into a
4714 register. This avoids attempting to create paradoxical SUBREGs
4715 of floating point modes below. */
4716 temp = force_reg (imode, temp);
4718 /* If the bitpos is within the "result mode" lowpart, the operation
4719 can be implemented with a single bitwise AND. Otherwise, we need
4720 a right shift and an AND. */
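/* For example, with IEEE single precision the sign bit is bit 31, so when
   RMODE is at least 32 bits wide a single AND against 1 << 31 suffices;
   for a narrower RMODE the shift-and-mask path below is used instead.  */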
4722 if (bitpos < GET_MODE_BITSIZE (rmode))
4724 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4726 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4727 temp = gen_lowpart (rmode, temp);
4728 temp = expand_binop (rmode, and_optab, temp,
4729 immed_wide_int_const (mask, rmode),
4730 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4732 else
4734 /* Perform a logical right shift to place the signbit in the least
4735 significant bit, then truncate the result to the desired mode
4736 and mask just this bit. */
4737 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4738 temp = gen_lowpart (rmode, temp);
4739 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4740 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4743 return temp;
4746 /* Expand fork or exec calls. TARGET is the desired target of the
4747 call. EXP is the call. FN is the
4748 identifier of the actual function. IGNORE is nonzero if the
4749 value is to be ignored. */
4751 static rtx
4752 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4754 tree id, decl;
4755 tree call;
4757 /* If we are not profiling, just call the function. */
4758 if (!profile_arc_flag)
4759 return NULL_RTX;
4761 /* Otherwise call the wrapper. This should be equivalent for the rest of the
4762 compiler, so the code does not diverge, and the wrapper may run the
4763 code necessary for keeping the profiling sane. */
4765 switch (DECL_FUNCTION_CODE (fn))
4767 case BUILT_IN_FORK:
4768 id = get_identifier ("__gcov_fork");
4769 break;
4771 case BUILT_IN_EXECL:
4772 id = get_identifier ("__gcov_execl");
4773 break;
4775 case BUILT_IN_EXECV:
4776 id = get_identifier ("__gcov_execv");
4777 break;
4779 case BUILT_IN_EXECLP:
4780 id = get_identifier ("__gcov_execlp");
4781 break;
4783 case BUILT_IN_EXECLE:
4784 id = get_identifier ("__gcov_execle");
4785 break;
4787 case BUILT_IN_EXECVP:
4788 id = get_identifier ("__gcov_execvp");
4789 break;
4791 case BUILT_IN_EXECVE:
4792 id = get_identifier ("__gcov_execve");
4793 break;
4795 default:
4796 gcc_unreachable ();
4799 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4800 FUNCTION_DECL, id, TREE_TYPE (fn));
4801 DECL_EXTERNAL (decl) = 1;
4802 TREE_PUBLIC (decl) = 1;
4803 DECL_ARTIFICIAL (decl) = 1;
4804 TREE_NOTHROW (decl) = 1;
4805 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4806 DECL_VISIBILITY_SPECIFIED (decl) = 1;
4807 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4808 return expand_call (call, target, ignore);
4813 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4814 the pointer in these functions is void*, the tree optimizers may remove
4815 casts. The mode computed in expand_builtin isn't reliable either, due
4816 to __sync_bool_compare_and_swap.
4818 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4819 group of builtins. This gives us log2 of the mode size. */
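/* For example, __sync_fetch_and_add_4 has FCODE_DIFF == 2, giving a mode of
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */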
4821 static inline machine_mode
4822 get_builtin_sync_mode (int fcode_diff)
4824 /* The size is not negotiable, so ask not to get BLKmode in return
4825 if the target indicates that a smaller size would be better. */
4826 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4829 /* Expand the memory expression LOC and return the appropriate memory operand
4830 for the builtin_sync operations. */
4832 static rtx
4833 get_builtin_sync_mem (tree loc, machine_mode mode)
4835 rtx addr, mem;
4837 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4838 addr = convert_memory_address (Pmode, addr);
4840 /* Note that we explicitly do not want any alias information for this
4841 memory, so that we kill all other live memories. Otherwise we don't
4842 satisfy the full barrier semantics of the intrinsic. */
4843 mem = validize_mem (gen_rtx_MEM (mode, addr));
4845 /* The alignment must be at least that of the mode. */
4846 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4847 get_pointer_alignment (loc)));
4848 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4849 MEM_VOLATILE_P (mem) = 1;
4851 return mem;
4854 /* Make sure an argument is in the right mode.
4855 EXP is the tree argument.
4856 MODE is the mode it should be in. */
4858 static rtx
4859 expand_expr_force_mode (tree exp, machine_mode mode)
4861 rtx val;
4862 machine_mode old_mode;
4864 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4865 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4866 of CONST_INTs, where we know the old_mode only from the call argument. */
4868 old_mode = GET_MODE (val);
4869 if (old_mode == VOIDmode)
4870 old_mode = TYPE_MODE (TREE_TYPE (exp));
4871 val = convert_modes (mode, old_mode, val, 1);
4872 return val;
4876 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4877 EXP is the CALL_EXPR. CODE is the rtx code
4878 that corresponds to the arithmetic or logical operation from the name;
4879 an exception here is that NOT actually means NAND. TARGET is an optional
4880 place for us to store the results; AFTER is true if this is the
4881 xxx_and_fetch form, i.e. if the value after the operation is returned. */
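/* For example, __sync_fetch_and_nand_4 reaches this function with
   CODE == NOT, which is why the -Wsync-nand warning below only checks
   the NAND builtins.  */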
4883 static rtx
4884 expand_builtin_sync_operation (machine_mode mode, tree exp,
4885 enum rtx_code code, bool after,
4886 rtx target)
4888 rtx val, mem;
4889 location_t loc = EXPR_LOCATION (exp);
4891 if (code == NOT && warn_sync_nand)
4893 tree fndecl = get_callee_fndecl (exp);
4894 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4896 static bool warned_f_a_n, warned_n_a_f;
4898 switch (fcode)
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4901 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4902 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4903 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4904 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4905 if (warned_f_a_n)
4906 break;
4908 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4909 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4910 warned_f_a_n = true;
4911 break;
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4914 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4915 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4916 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4917 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4918 if (warned_n_a_f)
4919 break;
4921 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4922 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4923 warned_n_a_f = true;
4924 break;
4926 default:
4927 gcc_unreachable ();
4931 /* Expand the operands. */
4932 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4933 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4935 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4936 after);
4939 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4940 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
4941 true if this is the boolean form. TARGET is a place for us to store the
4942 results; this is NOT optional if IS_BOOL is true. */
4944 static rtx
4945 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4946 bool is_bool, rtx target)
4948 rtx old_val, new_val, mem;
4949 rtx *pbool, *poval;
4951 /* Expand the operands. */
4952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4953 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4954 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4956 pbool = poval = NULL;
4957 if (target != const0_rtx)
4959 if (is_bool)
4960 pbool = &target;
4961 else
4962 poval = &target;
4964 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4965 false, MEMMODEL_SYNC_SEQ_CST,
4966 MEMMODEL_SYNC_SEQ_CST))
4967 return NULL_RTX;
4969 return target;
4972 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4973 general form is actually an atomic exchange, and some targets only
4974 support a reduced form with the second argument being a constant 1.
4975 EXP is the CALL_EXPR; TARGET is an optional place for us to store
4976 the results. */
4978 static rtx
4979 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
4980 rtx target)
4982 rtx val, mem;
4984 /* Expand the operands. */
4985 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4986 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4988 return expand_sync_lock_test_and_set (target, mem, val);
4991 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4993 static void
4994 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
4996 rtx mem;
4998 /* Expand the operands. */
4999 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5001 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5004 /* Given an integer representing an ``enum memmodel'', verify its
5005 correctness and return the memory model enum. */
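/* For example, a non-constant model argument is simply treated as
   __ATOMIC_SEQ_CST below, and a constant __ATOMIC_CONSUME is promoted to
   MEMMODEL_ACQUIRE as a workaround for PR 59448.  */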
5007 static enum memmodel
5008 get_memmodel (tree exp)
5010 rtx op;
5011 unsigned HOST_WIDE_INT val;
5012 source_location loc
5013 = expansion_point_location_if_in_system_header (input_location);
5015 /* If the parameter is not a constant, it's a run time value so we'll just
5016 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5017 if (TREE_CODE (exp) != INTEGER_CST)
5018 return MEMMODEL_SEQ_CST;
5020 op = expand_normal (exp);
5022 val = INTVAL (op);
5023 if (targetm.memmodel_check)
5024 val = targetm.memmodel_check (val);
5025 else if (val & ~MEMMODEL_MASK)
5027 warning_at (loc, OPT_Winvalid_memory_model,
5028 "unknown architecture specifier in memory model to builtin");
5029 return MEMMODEL_SEQ_CST;
5032 /* Should never see an explicit user SYNC memmodel, so >= LAST works. */
5033 if (memmodel_base (val) >= MEMMODEL_LAST)
5035 warning_at (loc, OPT_Winvalid_memory_model,
5036 "invalid memory model argument to builtin");
5037 return MEMMODEL_SEQ_CST;
5040 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5041 be conservative and promote consume to acquire. */
5042 if (val == MEMMODEL_CONSUME)
5043 val = MEMMODEL_ACQUIRE;
5045 return (enum memmodel) val;
5048 /* Expand the __atomic_exchange intrinsic:
5049 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5050 EXP is the CALL_EXPR.
5051 TARGET is an optional place for us to store the results. */
5053 static rtx
5054 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5056 rtx val, mem;
5057 enum memmodel model;
5059 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5061 if (!flag_inline_atomics)
5062 return NULL_RTX;
5064 /* Expand the operands. */
5065 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5066 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5068 return expand_atomic_exchange (target, mem, val, model);
5071 /* Expand the __atomic_compare_exchange intrinsic:
5072 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5073 TYPE desired, BOOL weak,
5074 enum memmodel success,
5075 enum memmodel failure)
5076 EXP is the CALL_EXPR.
5077 TARGET is an optional place for us to store the results. */
5079 static rtx
5080 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5081 rtx target)
5083 rtx expect, desired, mem, oldval;
5084 rtx_code_label *label;
5085 enum memmodel success, failure;
5086 tree weak;
5087 bool is_weak;
5088 source_location loc
5089 = expansion_point_location_if_in_system_header (input_location);
5091 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5092 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5094 if (failure > success)
5096 warning_at (loc, OPT_Winvalid_memory_model,
5097 "failure memory model cannot be stronger than success "
5098 "memory model for %<__atomic_compare_exchange%>");
5099 success = MEMMODEL_SEQ_CST;
5102 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5104 warning_at (loc, OPT_Winvalid_memory_model,
5105 "invalid failure memory model for "
5106 "%<__atomic_compare_exchange%>");
5107 failure = MEMMODEL_SEQ_CST;
5108 success = MEMMODEL_SEQ_CST;
5112 if (!flag_inline_atomics)
5113 return NULL_RTX;
5115 /* Expand the operands. */
5116 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5118 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5119 expect = convert_memory_address (Pmode, expect);
5120 expect = gen_rtx_MEM (mode, expect);
5121 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5123 weak = CALL_EXPR_ARG (exp, 3);
5124 is_weak = false;
5125 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5126 is_weak = true;
5128 if (target == const0_rtx)
5129 target = NULL;
5131 /* Lest the rtl backend create a race condition with an improper store
5132 to memory, always create a new pseudo for OLDVAL. */
5133 oldval = NULL;
5135 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5136 is_weak, success, failure))
5137 return NULL_RTX;
5139 /* Conditionally store back to EXPECT, lest we create a race condition
5140 with an improper store to memory. */
5141 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5142 the normal case where EXPECT is totally private, i.e. a register. At
5143 which point the store can be unconditional. */
5144 label = gen_label_rtx ();
5145 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5146 GET_MODE (target), 1, label);
5147 emit_move_insn (expect, oldval);
5148 emit_label (label);
5150 return target;
5153 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5154 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5155 call. The weak parameter must be dropped to match the expected parameter
5156 list and the expected argument changed from value to pointer to memory
5157 slot. */
5159 static void
5160 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5162 unsigned int z;
5163 vec<tree, va_gc> *vec;
5165 vec_alloc (vec, 5);
5166 vec->quick_push (gimple_call_arg (call, 0));
5167 tree expected = gimple_call_arg (call, 1);
5168 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5169 TREE_TYPE (expected));
5170 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5171 if (expd != x)
5172 emit_move_insn (x, expd);
5173 tree v = make_tree (TREE_TYPE (expected), x);
5174 vec->quick_push (build1 (ADDR_EXPR,
5175 build_pointer_type (TREE_TYPE (expected)), v));
5176 vec->quick_push (gimple_call_arg (call, 2));
5177 /* Skip the boolean weak parameter. */
5178 for (z = 4; z < 6; z++)
5179 vec->quick_push (gimple_call_arg (call, z));
5180 built_in_function fncode
5181 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5182 + exact_log2 (GET_MODE_SIZE (mode)));
5183 tree fndecl = builtin_decl_explicit (fncode);
5184 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5185 fndecl);
5186 tree exp = build_call_vec (boolean_type_node, fn, vec);
5187 tree lhs = gimple_call_lhs (call);
5188 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5189 if (lhs)
5191 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5192 if (GET_MODE (boolret) != mode)
5193 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5194 x = force_reg (mode, x);
5195 write_complex_part (target, boolret, true);
5196 write_complex_part (target, x, false);
5200 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5202 void
5203 expand_ifn_atomic_compare_exchange (gcall *call)
5205 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5206 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5207 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5208 rtx expect, desired, mem, oldval, boolret;
5209 enum memmodel success, failure;
5210 tree lhs;
5211 bool is_weak;
5212 source_location loc
5213 = expansion_point_location_if_in_system_header (gimple_location (call));
5215 success = get_memmodel (gimple_call_arg (call, 4));
5216 failure = get_memmodel (gimple_call_arg (call, 5));
5218 if (failure > success)
5220 warning_at (loc, OPT_Winvalid_memory_model,
5221 "failure memory model cannot be stronger than success "
5222 "memory model for %<__atomic_compare_exchange%>");
5223 success = MEMMODEL_SEQ_CST;
5226 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5228 warning_at (loc, OPT_Winvalid_memory_model,
5229 "invalid failure memory model for "
5230 "%<__atomic_compare_exchange%>");
5231 failure = MEMMODEL_SEQ_CST;
5232 success = MEMMODEL_SEQ_CST;
5235 if (!flag_inline_atomics)
5237 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5238 return;
5241 /* Expand the operands. */
5242 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5244 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5245 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5247 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5249 boolret = NULL;
5250 oldval = NULL;
5252 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5253 is_weak, success, failure))
5255 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5256 return;
5259 lhs = gimple_call_lhs (call);
5260 if (lhs)
5262 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5263 if (GET_MODE (boolret) != mode)
5264 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5265 write_complex_part (target, boolret, true);
5266 write_complex_part (target, oldval, false);
5270 /* Expand the __atomic_load intrinsic:
5271 TYPE __atomic_load (TYPE *object, enum memmodel)
5272 EXP is the CALL_EXPR.
5273 TARGET is an optional place for us to store the results. */
5275 static rtx
5276 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5278 rtx mem;
5279 enum memmodel model;
5281 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5282 if (is_mm_release (model) || is_mm_acq_rel (model))
5284 source_location loc
5285 = expansion_point_location_if_in_system_header (input_location);
5286 warning_at (loc, OPT_Winvalid_memory_model,
5287 "invalid memory model for %<__atomic_load%>");
5288 model = MEMMODEL_SEQ_CST;
5291 if (!flag_inline_atomics)
5292 return NULL_RTX;
5294 /* Expand the operand. */
5295 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5297 return expand_atomic_load (target, mem, model);
5301 /* Expand the __atomic_store intrinsic:
5302 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5303 EXP is the CALL_EXPR.
5304 TARGET is an optional place for us to store the results. */
5306 static rtx
5307 expand_builtin_atomic_store (machine_mode mode, tree exp)
5309 rtx mem, val;
5310 enum memmodel model;
5312 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5313 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5314 || is_mm_release (model)))
5316 source_location loc
5317 = expansion_point_location_if_in_system_header (input_location);
5318 warning_at (loc, OPT_Winvalid_memory_model,
5319 "invalid memory model for %<__atomic_store%>");
5320 model = MEMMODEL_SEQ_CST;
5323 if (!flag_inline_atomics)
5324 return NULL_RTX;
5326 /* Expand the operands. */
5327 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5328 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5330 return expand_atomic_store (mem, val, model, false);
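/* Usage sketch (COUNTER is a placeholder name); only relaxed, release and
   seq_cst orders are accepted, anything else is warned about above and
   replaced by seq_cst:

     __atomic_store_n (&counter, 0, __ATOMIC_RELEASE);  */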
5333 /* Expand the __atomic_fetch_XXX intrinsic:
5334 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5335 EXP is the CALL_EXPR.
5336 TARGET is an optional place for us to store the results.
5337 CODE is the operation, PLUS, MINUS, AND, XOR, IOR or NOT.
5338 FETCH_AFTER is true if returning the result of the operation.
5339 FETCH_AFTER is false if returning the value before the operation.
5340 IGNORE is true if the result is not used.
5341 EXT_CALL is the correct builtin for an external call if this cannot be
5342 resolved to an instruction sequence. */
5344 static rtx
5345 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5346 enum rtx_code code, bool fetch_after,
5347 bool ignore, enum built_in_function ext_call)
5349 rtx val, mem, ret;
5350 enum memmodel model;
5351 tree fndecl;
5352 tree addr;
5354 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5356 /* Expand the operands. */
5357 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5358 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5360 /* Only try generating instructions if inlining is turned on. */
5361 if (flag_inline_atomics)
5363 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5364 if (ret)
5365 return ret;
5368 /* Return if a different routine isn't needed for the library call. */
5369 if (ext_call == BUILT_IN_NONE)
5370 return NULL_RTX;
5372 /* Change the call to the specified function. */
5373 fndecl = get_callee_fndecl (exp);
5374 addr = CALL_EXPR_FN (exp);
5375 STRIP_NOPS (addr);
5377 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5378 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5380 /* Expand the call here so we can emit trailing code. */
5381 ret = expand_call (exp, target, ignore);
5383 /* Replace the original function just in case it matters. */
5384 TREE_OPERAND (addr, 0) = fndecl;
5386 /* Then issue the arithmetic correction to return the right result. */
5387 if (!ignore)
5389 if (code == NOT)
5391 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5392 OPTAB_LIB_WIDEN);
5393 ret = expand_simple_unop (mode, NOT, ret, target, true);
5395 else
5396 ret = expand_simple_binop (mode, code, ret, val, target, true,
5397 OPTAB_LIB_WIDEN);
5399 return ret;
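/* The trailing correction above relies on the source-level identity
   (P, VAL and M are placeholder names)

     __atomic_add_fetch (p, val, m) == __atomic_fetch_add (p, val, m) + val

   when an OP_FETCH builtin has to fall back to the corresponding external
   FETCH_OP routine; for NAND the new value is ~(old & val), hence the AND
   followed by NOT.  */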
5402 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5404 void
5405 expand_ifn_atomic_bit_test_and (gcall *call)
5407 tree ptr = gimple_call_arg (call, 0);
5408 tree bit = gimple_call_arg (call, 1);
5409 tree flag = gimple_call_arg (call, 2);
5410 tree lhs = gimple_call_lhs (call);
5411 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5412 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5413 enum rtx_code code;
5414 optab optab;
5415 struct expand_operand ops[5];
5417 gcc_assert (flag_inline_atomics);
5419 if (gimple_call_num_args (call) == 4)
5420 model = get_memmodel (gimple_call_arg (call, 3));
5422 rtx mem = get_builtin_sync_mem (ptr, mode);
5423 rtx val = expand_expr_force_mode (bit, mode);
5425 switch (gimple_call_internal_fn (call))
5427 case IFN_ATOMIC_BIT_TEST_AND_SET:
5428 code = IOR;
5429 optab = atomic_bit_test_and_set_optab;
5430 break;
5431 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5432 code = XOR;
5433 optab = atomic_bit_test_and_complement_optab;
5434 break;
5435 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5436 code = AND;
5437 optab = atomic_bit_test_and_reset_optab;
5438 break;
5439 default:
5440 gcc_unreachable ();
5443 if (lhs == NULL_TREE)
5445 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5446 val, NULL_RTX, true, OPTAB_DIRECT);
5447 if (code == AND)
5448 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5449 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5450 return;
5453 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5454 enum insn_code icode = direct_optab_handler (optab, mode);
5455 gcc_assert (icode != CODE_FOR_nothing);
5456 create_output_operand (&ops[0], target, mode);
5457 create_fixed_operand (&ops[1], mem);
5458 create_convert_operand_to (&ops[2], val, mode, true);
5459 create_integer_operand (&ops[3], model);
5460 create_integer_operand (&ops[4], integer_onep (flag));
5461 if (maybe_expand_insn (icode, 5, ops))
5462 return;
5464 rtx bitval = val;
5465 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5466 val, NULL_RTX, true, OPTAB_DIRECT);
5467 rtx maskval = val;
5468 if (code == AND)
5469 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5470 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5471 code, model, false);
5472 if (integer_onep (flag))
5474 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5475 NULL_RTX, true, OPTAB_DIRECT);
5476 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5477 true, OPTAB_DIRECT);
5479 else
5480 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5481 OPTAB_DIRECT);
5482 if (result != target)
5483 emit_move_insn (target, result);
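/* This internal function is created for calls where the only use of the
   value returned by one of the __atomic_fetch_{or,xor,and} builtins is a
   test of the single bit the operation changes, e.g. (X and BIT are
   placeholder names):

     bool was_set = (__atomic_fetch_or (&x, 1u << bit, __ATOMIC_SEQ_CST)
		     >> bit) & 1;

   which lets a direct atomic_bit_test_and_set pattern be used when the
   target provides one.  */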
5486 /* Expand an atomic clear operation.
5487 void __atomic_clear (BOOL *obj, enum memmodel)
5488 EXP is the call expression. */
5490 static rtx
5491 expand_builtin_atomic_clear (tree exp)
5493 machine_mode mode;
5494 rtx mem, ret;
5495 enum memmodel model;
5497 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5498 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5499 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5501 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5503 source_location loc
5504 = expansion_point_location_if_in_system_header (input_location);
5505 warning_at (loc, OPT_Winvalid_memory_model,
5506 "invalid memory model for %<__atomic_store%>");
5507 model = MEMMODEL_SEQ_CST;
5510 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release.
5511 Failing both, a plain store is emitted below. The only way this can
5512 fail is if the bool type is larger than a word size. Unlikely, but
5513 handle it anyway for completeness. Assume a single threaded model since
5514 there is no atomic support in this case, and no barriers are required. */
5515 ret = expand_atomic_store (mem, const0_rtx, model, true);
5516 if (!ret)
5517 emit_move_insn (mem, const0_rtx);
5518 return const0_rtx;
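/* Usage sketch (FLAG is a placeholder name); consume, acquire and acq_rel
   orders are rejected above because the clear is implemented as an atomic
   store of zero:

     __atomic_clear (&flag, __ATOMIC_RELEASE);  */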
5521 /* Expand an atomic test_and_set operation.
5522 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5523 EXP is the call expression. */
5525 static rtx
5526 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5528 rtx mem;
5529 enum memmodel model;
5530 machine_mode mode;
5532 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5533 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5534 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5536 return expand_atomic_test_and_set (target, mem, model);
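/* Together with __atomic_clear this is enough for a minimal spinlock
   (LOCK is a placeholder name):

     while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
       ;
     ... critical section ...
     __atomic_clear (&lock, __ATOMIC_RELEASE);  */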
5540 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5541 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5543 static tree
5544 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5546 int size;
5547 machine_mode mode;
5548 unsigned int mode_align, type_align;
5550 if (TREE_CODE (arg0) != INTEGER_CST)
5551 return NULL_TREE;
5553 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5554 mode = mode_for_size (size, MODE_INT, 0);
5555 mode_align = GET_MODE_ALIGNMENT (mode);
5557 if (TREE_CODE (arg1) == INTEGER_CST)
5559 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5561 /* Either this argument is null, or it's a fake pointer encoding
5562 the alignment of the object. */
5563 val = least_bit_hwi (val);
5564 val *= BITS_PER_UNIT;
5566 if (val == 0 || mode_align < val)
5567 type_align = mode_align;
5568 else
5569 type_align = val;
5571 else
5573 tree ttype = TREE_TYPE (arg1);
5575 /* This function is usually invoked and folded immediately by the front
5576 end before anything else has a chance to look at it. The pointer
5577 parameter at this point is usually cast to a void *, so check for that
5578 and look past the cast. */
5579 if (CONVERT_EXPR_P (arg1)
5580 && POINTER_TYPE_P (ttype)
5581 && VOID_TYPE_P (TREE_TYPE (ttype))
5582 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5583 arg1 = TREE_OPERAND (arg1, 0);
5585 ttype = TREE_TYPE (arg1);
5586 gcc_assert (POINTER_TYPE_P (ttype));
5588 /* Get the underlying type of the object. */
5589 ttype = TREE_TYPE (ttype);
5590 type_align = TYPE_ALIGN (ttype);
5593 /* If the object has smaller alignment, the lock free routines cannot
5594 be used. */
5595 if (type_align < mode_align)
5596 return boolean_false_node;
5598 /* Check if a compare_and_swap pattern exists for the mode which represents
5599 the required size. The pattern is not allowed to fail, so the existence
5600 of the pattern indicates support is present. */
5601 if (can_compare_and_swap_p (mode, true))
5602 return boolean_true_node;
5603 else
5604 return boolean_false_node;
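/* For example, on a target with a native word-sized compare-and-swap

     __atomic_always_lock_free (sizeof (int), 0)

   folds to 1 at compile time; passing the address of the object instead
   of 0 lets the alignment of its type be taken into account.  */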
5607 /* Return true if the parameters to call EXP represent an object which will
5608 always generate lock free instructions. The first argument represents the
5609 size of the object, and the second parameter is a pointer to the object
5610 itself. If NULL is passed for the object, then the result is based on
5611 typical alignment for an object of the specified size. Otherwise return
5612 false. */
5614 static rtx
5615 expand_builtin_atomic_always_lock_free (tree exp)
5617 tree size;
5618 tree arg0 = CALL_EXPR_ARG (exp, 0);
5619 tree arg1 = CALL_EXPR_ARG (exp, 1);
5621 if (TREE_CODE (arg0) != INTEGER_CST)
5623 error ("non-constant argument 1 to __atomic_always_lock_free");
5624 return const0_rtx;
5627 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5628 if (size == boolean_true_node)
5629 return const1_rtx;
5630 return const0_rtx;
5633 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5634 is lock free on this architecture. */
5636 static tree
5637 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5639 if (!flag_inline_atomics)
5640 return NULL_TREE;
5642 /* If it isn't always lock free, don't generate a result. */
5643 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5644 return boolean_true_node;
5646 return NULL_TREE;
5649 /* Return true if the parameters to call EXP represent an object which will
5650 always generate lock free instructions. The first argument represents the
5651 size of the object, and the second parameter is a pointer to the object
5652 itself. If NULL is passed for the object, then the result is based on
5653 typical alignment for an object of the specified size. Otherwise return
5654 NULL.  */
5656 static rtx
5657 expand_builtin_atomic_is_lock_free (tree exp)
5659 tree size;
5660 tree arg0 = CALL_EXPR_ARG (exp, 0);
5661 tree arg1 = CALL_EXPR_ARG (exp, 1);
5663 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5665 error ("non-integer argument 1 to __atomic_is_lock_free");
5666 return NULL_RTX;
5669 if (!flag_inline_atomics)
5670 return NULL_RTX;
5672 /* If the value is known at compile time, return the RTX for it. */
5673 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5674 if (size == boolean_true_node)
5675 return const1_rtx;
5677 return NULL_RTX;
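/* Unlike __atomic_always_lock_free, a use that cannot be decided at
   compile time, e.g. for an underaligned object, is not folded to 0;
   NULL_RTX is returned here and the caller emits a call to the
   __atomic_is_lock_free routine in the runtime library instead.  */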
5680 /* Expand the __atomic_thread_fence intrinsic:
5681 void __atomic_thread_fence (enum memmodel)
5682 EXP is the CALL_EXPR. */
5684 static void
5685 expand_builtin_atomic_thread_fence (tree exp)
5687 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5688 expand_mem_thread_fence (model);
5691 /* Expand the __atomic_signal_fence intrinsic:
5692 void __atomic_signal_fence (enum memmodel)
5693 EXP is the CALL_EXPR. */
5695 static void
5696 expand_builtin_atomic_signal_fence (tree exp)
5698 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5699 expand_mem_signal_fence (model);
5702 /* Expand the __sync_synchronize intrinsic. */
5704 static void
5705 expand_builtin_sync_synchronize (void)
5707 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
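/* Usage sketch: __atomic_thread_fence orders memory accesses with respect
   to other threads, while __atomic_signal_fence only constrains the
   compiler with respect to a signal handler running on the same thread:

     __atomic_thread_fence (__ATOMIC_SEQ_CST);
     __atomic_signal_fence (__ATOMIC_ACQUIRE);  */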
5710 static rtx
5711 expand_builtin_thread_pointer (tree exp, rtx target)
5713 enum insn_code icode;
5714 if (!validate_arglist (exp, VOID_TYPE))
5715 return const0_rtx;
5716 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5717 if (icode != CODE_FOR_nothing)
5719 struct expand_operand op;
5720 /* If the target is not suitable then create a new target. */
5721 if (target == NULL_RTX
5722 || !REG_P (target)
5723 || GET_MODE (target) != Pmode)
5724 target = gen_reg_rtx (Pmode);
5725 create_output_operand (&op, target, Pmode);
5726 expand_insn (icode, 1, &op);
5727 return target;
5729 error ("__builtin_thread_pointer is not supported on this target");
5730 return const0_rtx;
5733 static void
5734 expand_builtin_set_thread_pointer (tree exp)
5736 enum insn_code icode;
5737 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5738 return;
5739 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5740 if (icode != CODE_FOR_nothing)
5742 struct expand_operand op;
5743 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5744 Pmode, EXPAND_NORMAL);
5745 create_input_operand (&op, val, Pmode);
5746 expand_insn (icode, 1, &op);
5747 return;
5749 error ("__builtin_set_thread_pointer is not supported on this target");
5753 /* Emit code to restore the current value of stack. */
5755 static void
5756 expand_stack_restore (tree var)
5758 rtx_insn *prev;
5759 rtx sa = expand_normal (var);
5761 sa = convert_memory_address (Pmode, sa);
5763 prev = get_last_insn ();
5764 emit_stack_restore (SAVE_BLOCK, sa);
5766 record_new_stack_level ();
5768 fixup_args_size_notes (prev, get_last_insn (), 0);
5771 /* Emit code to save the current value of stack. */
5773 static rtx
5774 expand_stack_save (void)
5776 rtx ret = NULL_RTX;
5778 emit_stack_save (SAVE_BLOCK, &ret);
5779 return ret;
5783 /* Expand an expression EXP that calls a built-in function,
5784 with result going to TARGET if that's convenient
5785 (and in mode MODE if that's convenient).
5786 SUBTARGET may be used as the target for computing one of EXP's operands.
5787 IGNORE is nonzero if the value is to be ignored. */
5790 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5791 int ignore)
5793 tree fndecl = get_callee_fndecl (exp);
5794 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5795 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5796 int flags;
5798 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5799 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5801 /* When ASan is enabled, we don't want to expand some memory/string
5802 builtins and rely on libsanitizer's hooks. This allows us to avoid
5803 redundant checks and be sure, that possible overflow will be detected
5804 by ASan. */
5806 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5807 return expand_call (exp, target, ignore);
5809 /* When not optimizing, generate calls to library functions for a certain
5810 set of builtins. */
5811 if (!optimize
5812 && !called_as_built_in (fndecl)
5813 && fcode != BUILT_IN_FORK
5814 && fcode != BUILT_IN_EXECL
5815 && fcode != BUILT_IN_EXECV
5816 && fcode != BUILT_IN_EXECLP
5817 && fcode != BUILT_IN_EXECLE
5818 && fcode != BUILT_IN_EXECVP
5819 && fcode != BUILT_IN_EXECVE
5820 && fcode != BUILT_IN_ALLOCA
5821 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5822 && fcode != BUILT_IN_FREE
5823 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5826 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5827 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5829 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5830 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5831 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5832 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5833 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5834 && fcode != BUILT_IN_CHKP_BNDRET)
5835 return expand_call (exp, target, ignore);
5837 /* The built-in function expanders test for target == const0_rtx
5838 to determine whether the function's result will be ignored. */
5839 if (ignore)
5840 target = const0_rtx;
5842 /* If the result of a pure or const built-in function is ignored, and
5843 none of its arguments are volatile, we can avoid expanding the
5844 built-in call and just evaluate the arguments for side-effects. */
5845 if (target == const0_rtx
5846 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5847 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5849 bool volatilep = false;
5850 tree arg;
5851 call_expr_arg_iterator iter;
5853 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5854 if (TREE_THIS_VOLATILE (arg))
5856 volatilep = true;
5857 break;
5860 if (! volatilep)
5862 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5863 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5864 return const0_rtx;
5868 /* expand_builtin_with_bounds is supposed to be used for
5869 instrumented builtin calls. */
5870 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5872 switch (fcode)
5874 CASE_FLT_FN (BUILT_IN_FABS):
5875 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
5876 case BUILT_IN_FABSD32:
5877 case BUILT_IN_FABSD64:
5878 case BUILT_IN_FABSD128:
5879 target = expand_builtin_fabs (exp, target, subtarget);
5880 if (target)
5881 return target;
5882 break;
5884 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5885 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
5886 target = expand_builtin_copysign (exp, target, subtarget);
5887 if (target)
5888 return target;
5889 break;
5891 /* Just do a normal library call if we were unable to fold
5892 the values. */
5893 CASE_FLT_FN (BUILT_IN_CABS):
5894 break;
5896 CASE_FLT_FN (BUILT_IN_FMA):
5897 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5898 if (target)
5899 return target;
5900 break;
5902 CASE_FLT_FN (BUILT_IN_ILOGB):
5903 if (! flag_unsafe_math_optimizations)
5904 break;
5905 gcc_fallthrough ();
5906 CASE_FLT_FN (BUILT_IN_ISINF):
5907 CASE_FLT_FN (BUILT_IN_FINITE):
5908 case BUILT_IN_ISFINITE:
5909 case BUILT_IN_ISNORMAL:
5910 target = expand_builtin_interclass_mathfn (exp, target);
5911 if (target)
5912 return target;
5913 break;
5915 CASE_FLT_FN (BUILT_IN_ICEIL):
5916 CASE_FLT_FN (BUILT_IN_LCEIL):
5917 CASE_FLT_FN (BUILT_IN_LLCEIL):
5918 CASE_FLT_FN (BUILT_IN_LFLOOR):
5919 CASE_FLT_FN (BUILT_IN_IFLOOR):
5920 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5921 target = expand_builtin_int_roundingfn (exp, target);
5922 if (target)
5923 return target;
5924 break;
5926 CASE_FLT_FN (BUILT_IN_IRINT):
5927 CASE_FLT_FN (BUILT_IN_LRINT):
5928 CASE_FLT_FN (BUILT_IN_LLRINT):
5929 CASE_FLT_FN (BUILT_IN_IROUND):
5930 CASE_FLT_FN (BUILT_IN_LROUND):
5931 CASE_FLT_FN (BUILT_IN_LLROUND):
5932 target = expand_builtin_int_roundingfn_2 (exp, target);
5933 if (target)
5934 return target;
5935 break;
5937 CASE_FLT_FN (BUILT_IN_POWI):
5938 target = expand_builtin_powi (exp, target);
5939 if (target)
5940 return target;
5941 break;
5943 CASE_FLT_FN (BUILT_IN_CEXPI):
5944 target = expand_builtin_cexpi (exp, target);
5945 gcc_assert (target);
5946 return target;
5948 CASE_FLT_FN (BUILT_IN_SIN):
5949 CASE_FLT_FN (BUILT_IN_COS):
5950 if (! flag_unsafe_math_optimizations)
5951 break;
5952 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5953 if (target)
5954 return target;
5955 break;
5957 CASE_FLT_FN (BUILT_IN_SINCOS):
5958 if (! flag_unsafe_math_optimizations)
5959 break;
5960 target = expand_builtin_sincos (exp);
5961 if (target)
5962 return target;
5963 break;
5965 case BUILT_IN_APPLY_ARGS:
5966 return expand_builtin_apply_args ();
5968 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5969 FUNCTION with a copy of the parameters described by
5970 ARGUMENTS, and ARGSIZE. It returns a block of memory
5971 allocated on the stack into which is stored all the registers
5972 that might possibly be used for returning the result of a
5973 function. ARGUMENTS is the value returned by
5974 __builtin_apply_args. ARGSIZE is the number of bytes of
5975 arguments that must be copied. ??? How should this value be
5976 computed? We'll also need a safe worst case value for varargs
5977 functions. */
5978 case BUILT_IN_APPLY:
5979 if (!validate_arglist (exp, POINTER_TYPE,
5980 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5981 && !validate_arglist (exp, REFERENCE_TYPE,
5982 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5983 return const0_rtx;
5984 else
5986 rtx ops[3];
5988 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5989 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5990 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5992 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5995 /* __builtin_return (RESULT) causes the function to return the
5996 value described by RESULT. RESULT is address of the block of
5997 memory returned by __builtin_apply. */
5998 case BUILT_IN_RETURN:
5999 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6000 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6001 return const0_rtx;
6003 case BUILT_IN_SAVEREGS:
6004 return expand_builtin_saveregs ();
6006 case BUILT_IN_VA_ARG_PACK:
6007 /* All valid uses of __builtin_va_arg_pack () are removed during
6008 inlining. */
6009 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6010 return const0_rtx;
6012 case BUILT_IN_VA_ARG_PACK_LEN:
6013 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6014 inlining. */
6015 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6016 return const0_rtx;
6018 /* Return the address of the first anonymous stack arg. */
6019 case BUILT_IN_NEXT_ARG:
6020 if (fold_builtin_next_arg (exp, false))
6021 return const0_rtx;
6022 return expand_builtin_next_arg ();
6024 case BUILT_IN_CLEAR_CACHE:
6025 target = expand_builtin___clear_cache (exp);
6026 if (target)
6027 return target;
6028 break;
6030 case BUILT_IN_CLASSIFY_TYPE:
6031 return expand_builtin_classify_type (exp);
6033 case BUILT_IN_CONSTANT_P:
6034 return const0_rtx;
6036 case BUILT_IN_FRAME_ADDRESS:
6037 case BUILT_IN_RETURN_ADDRESS:
6038 return expand_builtin_frame_address (fndecl, exp);
6040 /* Returns the address of the area where the structure is returned.
6041 0 otherwise. */
6042 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6043 if (call_expr_nargs (exp) != 0
6044 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6045 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6046 return const0_rtx;
6047 else
6048 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6050 case BUILT_IN_ALLOCA:
6051 case BUILT_IN_ALLOCA_WITH_ALIGN:
6052 /* If the allocation stems from the declaration of a variable-sized
6053 object, it cannot accumulate. */
6054 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6055 if (target)
6056 return target;
6057 break;
6059 case BUILT_IN_STACK_SAVE:
6060 return expand_stack_save ();
6062 case BUILT_IN_STACK_RESTORE:
6063 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6064 return const0_rtx;
6066 case BUILT_IN_BSWAP16:
6067 case BUILT_IN_BSWAP32:
6068 case BUILT_IN_BSWAP64:
6069 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6070 if (target)
6071 return target;
6072 break;
6074 CASE_INT_FN (BUILT_IN_FFS):
6075 target = expand_builtin_unop (target_mode, exp, target,
6076 subtarget, ffs_optab);
6077 if (target)
6078 return target;
6079 break;
6081 CASE_INT_FN (BUILT_IN_CLZ):
6082 target = expand_builtin_unop (target_mode, exp, target,
6083 subtarget, clz_optab);
6084 if (target)
6085 return target;
6086 break;
6088 CASE_INT_FN (BUILT_IN_CTZ):
6089 target = expand_builtin_unop (target_mode, exp, target,
6090 subtarget, ctz_optab);
6091 if (target)
6092 return target;
6093 break;
6095 CASE_INT_FN (BUILT_IN_CLRSB):
6096 target = expand_builtin_unop (target_mode, exp, target,
6097 subtarget, clrsb_optab);
6098 if (target)
6099 return target;
6100 break;
6102 CASE_INT_FN (BUILT_IN_POPCOUNT):
6103 target = expand_builtin_unop (target_mode, exp, target,
6104 subtarget, popcount_optab);
6105 if (target)
6106 return target;
6107 break;
6109 CASE_INT_FN (BUILT_IN_PARITY):
6110 target = expand_builtin_unop (target_mode, exp, target,
6111 subtarget, parity_optab);
6112 if (target)
6113 return target;
6114 break;
6116 case BUILT_IN_STRLEN:
6117 target = expand_builtin_strlen (exp, target, target_mode);
6118 if (target)
6119 return target;
6120 break;
6122 case BUILT_IN_STRCPY:
6123 target = expand_builtin_strcpy (exp, target);
6124 if (target)
6125 return target;
6126 break;
6128 case BUILT_IN_STRNCPY:
6129 target = expand_builtin_strncpy (exp, target);
6130 if (target)
6131 return target;
6132 break;
6134 case BUILT_IN_STPCPY:
6135 target = expand_builtin_stpcpy (exp, target, mode);
6136 if (target)
6137 return target;
6138 break;
6140 case BUILT_IN_MEMCPY:
6141 target = expand_builtin_memcpy (exp, target);
6142 if (target)
6143 return target;
6144 break;
6146 case BUILT_IN_MEMPCPY:
6147 target = expand_builtin_mempcpy (exp, target, mode);
6148 if (target)
6149 return target;
6150 break;
6152 case BUILT_IN_MEMSET:
6153 target = expand_builtin_memset (exp, target, mode);
6154 if (target)
6155 return target;
6156 break;
6158 case BUILT_IN_BZERO:
6159 target = expand_builtin_bzero (exp);
6160 if (target)
6161 return target;
6162 break;
6164 case BUILT_IN_STRCMP:
6165 target = expand_builtin_strcmp (exp, target);
6166 if (target)
6167 return target;
6168 break;
6170 case BUILT_IN_STRNCMP:
6171 target = expand_builtin_strncmp (exp, target, mode);
6172 if (target)
6173 return target;
6174 break;
6176 case BUILT_IN_BCMP:
6177 case BUILT_IN_MEMCMP:
6178 case BUILT_IN_MEMCMP_EQ:
6179 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6180 if (target)
6181 return target;
6182 if (fcode == BUILT_IN_MEMCMP_EQ)
6184 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6185 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6187 break;
6189 case BUILT_IN_SETJMP:
6190 /* This should have been lowered to the builtins below. */
6191 gcc_unreachable ();
6193 case BUILT_IN_SETJMP_SETUP:
6194 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6195 and the receiver label. */
6196 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6198 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6199 VOIDmode, EXPAND_NORMAL);
6200 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6201 rtx_insn *label_r = label_rtx (label);
6203 /* This is copied from the handling of non-local gotos. */
6204 expand_builtin_setjmp_setup (buf_addr, label_r);
6205 nonlocal_goto_handler_labels
6206 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6207 nonlocal_goto_handler_labels);
6208 /* ??? Do not let expand_label treat us as such since we would
6209 not want to be both on the list of non-local labels and on
6210 the list of forced labels. */
6211 FORCED_LABEL (label) = 0;
6212 return const0_rtx;
6214 break;
6216 case BUILT_IN_SETJMP_RECEIVER:
6217 /* __builtin_setjmp_receiver is passed the receiver label. */
6218 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6220 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6221 rtx_insn *label_r = label_rtx (label);
6223 expand_builtin_setjmp_receiver (label_r);
6224 return const0_rtx;
6226 break;
6228 /* __builtin_longjmp is passed a pointer to an array of five words.
6229 It's similar to the C library longjmp function but works with
6230 __builtin_setjmp above. */
6231 case BUILT_IN_LONGJMP:
6232 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6234 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6235 VOIDmode, EXPAND_NORMAL);
6236 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6238 if (value != const1_rtx)
6240 error ("%<__builtin_longjmp%> second argument must be 1");
6241 return const0_rtx;
6244 expand_builtin_longjmp (buf_addr, value);
6245 return const0_rtx;
6247 break;
6249 case BUILT_IN_NONLOCAL_GOTO:
6250 target = expand_builtin_nonlocal_goto (exp);
6251 if (target)
6252 return target;
6253 break;
6255 /* This updates the setjmp buffer that is its argument with the value
6256 of the current stack pointer. */
6257 case BUILT_IN_UPDATE_SETJMP_BUF:
6258 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6260 rtx buf_addr
6261 = expand_normal (CALL_EXPR_ARG (exp, 0));
6263 expand_builtin_update_setjmp_buf (buf_addr);
6264 return const0_rtx;
6266 break;
6268 case BUILT_IN_TRAP:
6269 expand_builtin_trap ();
6270 return const0_rtx;
6272 case BUILT_IN_UNREACHABLE:
6273 expand_builtin_unreachable ();
6274 return const0_rtx;
6276 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6277 case BUILT_IN_SIGNBITD32:
6278 case BUILT_IN_SIGNBITD64:
6279 case BUILT_IN_SIGNBITD128:
6280 target = expand_builtin_signbit (exp, target);
6281 if (target)
6282 return target;
6283 break;
6285 /* Various hooks for the DWARF 2 __throw routine. */
6286 case BUILT_IN_UNWIND_INIT:
6287 expand_builtin_unwind_init ();
6288 return const0_rtx;
6289 case BUILT_IN_DWARF_CFA:
6290 return virtual_cfa_rtx;
6291 #ifdef DWARF2_UNWIND_INFO
6292 case BUILT_IN_DWARF_SP_COLUMN:
6293 return expand_builtin_dwarf_sp_column ();
6294 case BUILT_IN_INIT_DWARF_REG_SIZES:
6295 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6296 return const0_rtx;
6297 #endif
6298 case BUILT_IN_FROB_RETURN_ADDR:
6299 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6300 case BUILT_IN_EXTRACT_RETURN_ADDR:
6301 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6302 case BUILT_IN_EH_RETURN:
6303 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6304 CALL_EXPR_ARG (exp, 1));
6305 return const0_rtx;
6306 case BUILT_IN_EH_RETURN_DATA_REGNO:
6307 return expand_builtin_eh_return_data_regno (exp);
6308 case BUILT_IN_EXTEND_POINTER:
6309 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6310 case BUILT_IN_EH_POINTER:
6311 return expand_builtin_eh_pointer (exp);
6312 case BUILT_IN_EH_FILTER:
6313 return expand_builtin_eh_filter (exp);
6314 case BUILT_IN_EH_COPY_VALUES:
6315 return expand_builtin_eh_copy_values (exp);
6317 case BUILT_IN_VA_START:
6318 return expand_builtin_va_start (exp);
6319 case BUILT_IN_VA_END:
6320 return expand_builtin_va_end (exp);
6321 case BUILT_IN_VA_COPY:
6322 return expand_builtin_va_copy (exp);
6323 case BUILT_IN_EXPECT:
6324 return expand_builtin_expect (exp, target);
6325 case BUILT_IN_ASSUME_ALIGNED:
6326 return expand_builtin_assume_aligned (exp, target);
6327 case BUILT_IN_PREFETCH:
6328 expand_builtin_prefetch (exp);
6329 return const0_rtx;
6331 case BUILT_IN_INIT_TRAMPOLINE:
6332 return expand_builtin_init_trampoline (exp, true);
6333 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6334 return expand_builtin_init_trampoline (exp, false);
6335 case BUILT_IN_ADJUST_TRAMPOLINE:
6336 return expand_builtin_adjust_trampoline (exp);
6338 case BUILT_IN_FORK:
6339 case BUILT_IN_EXECL:
6340 case BUILT_IN_EXECV:
6341 case BUILT_IN_EXECLP:
6342 case BUILT_IN_EXECLE:
6343 case BUILT_IN_EXECVP:
6344 case BUILT_IN_EXECVE:
6345 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6346 if (target)
6347 return target;
6348 break;
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6351 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6352 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6353 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6354 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6355 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6356 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6357 if (target)
6358 return target;
6359 break;
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6362 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6363 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6364 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6365 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6366 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6367 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6368 if (target)
6369 return target;
6370 break;
6372 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6373 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6374 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6375 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6376 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6377 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6378 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6379 if (target)
6380 return target;
6381 break;
6383 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6384 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6385 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6386 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6387 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6388 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6389 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6390 if (target)
6391 return target;
6392 break;
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6395 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6396 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6397 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6398 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6399 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6400 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6401 if (target)
6402 return target;
6403 break;
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6406 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6407 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6408 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6409 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6410 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6411 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6412 if (target)
6413 return target;
6414 break;
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6417 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6418 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6419 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6420 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6421 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6422 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6423 if (target)
6424 return target;
6425 break;
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6428 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6429 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6430 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6431 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6432 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6433 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6434 if (target)
6435 return target;
6436 break;
6438 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6439 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6440 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6441 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6442 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6443 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6444 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6445 if (target)
6446 return target;
6447 break;
6449 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6450 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6451 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6452 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6453 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6454 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6455 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6456 if (target)
6457 return target;
6458 break;
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6461 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6462 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6463 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6464 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6465 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6466 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6467 if (target)
6468 return target;
6469 break;
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6472 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6473 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6474 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6475 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6476 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6477 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6478 if (target)
6479 return target;
6480 break;
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6483 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6484 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6485 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6486 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6487 if (mode == VOIDmode)
6488 mode = TYPE_MODE (boolean_type_node);
6489 if (!target || !register_operand (target, mode))
6490 target = gen_reg_rtx (mode);
6492 mode = get_builtin_sync_mode
6493 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6494 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6495 if (target)
6496 return target;
6497 break;
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6500 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6501 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6502 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6503 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6504 mode = get_builtin_sync_mode
6505 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6506 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6507 if (target)
6508 return target;
6509 break;
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6512 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6513 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6514 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6515 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6516 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6517 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6518 if (target)
6519 return target;
6520 break;
6522 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6523 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6524 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6525 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6526 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6527 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6528 expand_builtin_sync_lock_release (mode, exp);
6529 return const0_rtx;
6531 case BUILT_IN_SYNC_SYNCHRONIZE:
6532 expand_builtin_sync_synchronize ();
6533 return const0_rtx;
6535 case BUILT_IN_ATOMIC_EXCHANGE_1:
6536 case BUILT_IN_ATOMIC_EXCHANGE_2:
6537 case BUILT_IN_ATOMIC_EXCHANGE_4:
6538 case BUILT_IN_ATOMIC_EXCHANGE_8:
6539 case BUILT_IN_ATOMIC_EXCHANGE_16:
6540 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6541 target = expand_builtin_atomic_exchange (mode, exp, target);
6542 if (target)
6543 return target;
6544 break;
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6547 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6548 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6549 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6550 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6552 unsigned int nargs, z;
6553 vec<tree, va_gc> *vec;
6555 mode =
6556 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6557 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6558 if (target)
6559 return target;
6561 /* If this is turned into an external library call, the weak parameter
6562 must be dropped to match the expected parameter list. */
6563 nargs = call_expr_nargs (exp);
6564 vec_alloc (vec, nargs - 1);
6565 for (z = 0; z < 3; z++)
6566 vec->quick_push (CALL_EXPR_ARG (exp, z));
6567 /* Skip the boolean weak parameter. */
6568 for (z = 4; z < 6; z++)
6569 vec->quick_push (CALL_EXPR_ARG (exp, z));
6570 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6571 break;
6574 case BUILT_IN_ATOMIC_LOAD_1:
6575 case BUILT_IN_ATOMIC_LOAD_2:
6576 case BUILT_IN_ATOMIC_LOAD_4:
6577 case BUILT_IN_ATOMIC_LOAD_8:
6578 case BUILT_IN_ATOMIC_LOAD_16:
6579 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6580 target = expand_builtin_atomic_load (mode, exp, target);
6581 if (target)
6582 return target;
6583 break;
6585 case BUILT_IN_ATOMIC_STORE_1:
6586 case BUILT_IN_ATOMIC_STORE_2:
6587 case BUILT_IN_ATOMIC_STORE_4:
6588 case BUILT_IN_ATOMIC_STORE_8:
6589 case BUILT_IN_ATOMIC_STORE_16:
6590 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6591 target = expand_builtin_atomic_store (mode, exp);
6592 if (target)
6593 return const0_rtx;
6594 break;
6596 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6597 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6598 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6599 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6600 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6602 enum built_in_function lib;
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6604 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6605 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6606 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6607 ignore, lib);
6608 if (target)
6609 return target;
6610 break;
6612 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6613 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6614 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6615 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6616 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6618 enum built_in_function lib;
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6621 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6622 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6623 ignore, lib);
6624 if (target)
6625 return target;
6626 break;
6628 case BUILT_IN_ATOMIC_AND_FETCH_1:
6629 case BUILT_IN_ATOMIC_AND_FETCH_2:
6630 case BUILT_IN_ATOMIC_AND_FETCH_4:
6631 case BUILT_IN_ATOMIC_AND_FETCH_8:
6632 case BUILT_IN_ATOMIC_AND_FETCH_16:
6634 enum built_in_function lib;
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6637 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6638 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6639 ignore, lib);
6640 if (target)
6641 return target;
6642 break;
6644 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6645 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6646 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6647 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6648 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6650 enum built_in_function lib;
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6653 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6654 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6655 ignore, lib);
6656 if (target)
6657 return target;
6658 break;
6660 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6661 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6662 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6663 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6664 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6666 enum built_in_function lib;
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6669 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6671 ignore, lib);
6672 if (target)
6673 return target;
6674 break;
6676 case BUILT_IN_ATOMIC_OR_FETCH_1:
6677 case BUILT_IN_ATOMIC_OR_FETCH_2:
6678 case BUILT_IN_ATOMIC_OR_FETCH_4:
6679 case BUILT_IN_ATOMIC_OR_FETCH_8:
6680 case BUILT_IN_ATOMIC_OR_FETCH_16:
6682 enum built_in_function lib;
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6685 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6687 ignore, lib);
6688 if (target)
6689 return target;
6690 break;
6692 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6693 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6694 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6695 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6696 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6697 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6698 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6699 ignore, BUILT_IN_NONE);
6700 if (target)
6701 return target;
6702 break;
6704 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6705 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6706 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6707 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6708 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6709 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6710 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6711 ignore, BUILT_IN_NONE);
6712 if (target)
6713 return target;
6714 break;
6716 case BUILT_IN_ATOMIC_FETCH_AND_1:
6717 case BUILT_IN_ATOMIC_FETCH_AND_2:
6718 case BUILT_IN_ATOMIC_FETCH_AND_4:
6719 case BUILT_IN_ATOMIC_FETCH_AND_8:
6720 case BUILT_IN_ATOMIC_FETCH_AND_16:
6721 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6722 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6723 ignore, BUILT_IN_NONE);
6724 if (target)
6725 return target;
6726 break;
6728 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6729 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6730 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6731 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6732 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6734 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6735 ignore, BUILT_IN_NONE);
6736 if (target)
6737 return target;
6738 break;
6740 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6741 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6742 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6743 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6744 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6745 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6746 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6747 ignore, BUILT_IN_NONE);
6748 if (target)
6749 return target;
6750 break;
6752 case BUILT_IN_ATOMIC_FETCH_OR_1:
6753 case BUILT_IN_ATOMIC_FETCH_OR_2:
6754 case BUILT_IN_ATOMIC_FETCH_OR_4:
6755 case BUILT_IN_ATOMIC_FETCH_OR_8:
6756 case BUILT_IN_ATOMIC_FETCH_OR_16:
6757 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6758 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6759 ignore, BUILT_IN_NONE);
6760 if (target)
6761 return target;
6762 break;
6764 case BUILT_IN_ATOMIC_TEST_AND_SET:
6765 return expand_builtin_atomic_test_and_set (exp, target);
6767 case BUILT_IN_ATOMIC_CLEAR:
6768 return expand_builtin_atomic_clear (exp);
6770 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6771 return expand_builtin_atomic_always_lock_free (exp);
6773 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6774 target = expand_builtin_atomic_is_lock_free (exp);
6775 if (target)
6776 return target;
6777 break;
6779 case BUILT_IN_ATOMIC_THREAD_FENCE:
6780 expand_builtin_atomic_thread_fence (exp);
6781 return const0_rtx;
6783 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6784 expand_builtin_atomic_signal_fence (exp);
6785 return const0_rtx;
6787 case BUILT_IN_OBJECT_SIZE:
6788 return expand_builtin_object_size (exp);
6790 case BUILT_IN_MEMCPY_CHK:
6791 case BUILT_IN_MEMPCPY_CHK:
6792 case BUILT_IN_MEMMOVE_CHK:
6793 case BUILT_IN_MEMSET_CHK:
6794 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6795 if (target)
6796 return target;
6797 break;
6799 case BUILT_IN_STRCPY_CHK:
6800 case BUILT_IN_STPCPY_CHK:
6801 case BUILT_IN_STRNCPY_CHK:
6802 case BUILT_IN_STPNCPY_CHK:
6803 case BUILT_IN_STRCAT_CHK:
6804 case BUILT_IN_STRNCAT_CHK:
6805 case BUILT_IN_SNPRINTF_CHK:
6806 case BUILT_IN_VSNPRINTF_CHK:
6807 maybe_emit_chk_warning (exp, fcode);
6808 break;
6810 case BUILT_IN_SPRINTF_CHK:
6811 case BUILT_IN_VSPRINTF_CHK:
6812 maybe_emit_sprintf_chk_warning (exp, fcode);
6813 break;
6815 case BUILT_IN_FREE:
6816 if (warn_free_nonheap_object)
6817 maybe_emit_free_warning (exp);
6818 break;
6820 case BUILT_IN_THREAD_POINTER:
6821 return expand_builtin_thread_pointer (exp, target);
6823 case BUILT_IN_SET_THREAD_POINTER:
6824 expand_builtin_set_thread_pointer (exp);
6825 return const0_rtx;
6827 case BUILT_IN_CILK_DETACH:
6828 expand_builtin_cilk_detach (exp);
6829 return const0_rtx;
6831 case BUILT_IN_CILK_POP_FRAME:
6832 expand_builtin_cilk_pop_frame (exp);
6833 return const0_rtx;
6835 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6836 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6839 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6840 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6841 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6842 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6843 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6844 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6845 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6846 /* We allow user CHKP builtins if Pointer Bounds
6847 Checker is off. */
6848 if (!chkp_function_instrumented_p (current_function_decl))
6850 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6851 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6852 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6853 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6854 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6855 return expand_normal (CALL_EXPR_ARG (exp, 0));
6856 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6857 return expand_normal (size_zero_node);
6858 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6859 return expand_normal (size_int (-1));
6860 else
6861 return const0_rtx;
6863 /* FALLTHROUGH */
6865 case BUILT_IN_CHKP_BNDMK:
6866 case BUILT_IN_CHKP_BNDSTX:
6867 case BUILT_IN_CHKP_BNDCL:
6868 case BUILT_IN_CHKP_BNDCU:
6869 case BUILT_IN_CHKP_BNDLDX:
6870 case BUILT_IN_CHKP_BNDRET:
6871 case BUILT_IN_CHKP_INTERSECT:
6872 case BUILT_IN_CHKP_NARROW:
6873 case BUILT_IN_CHKP_EXTRACT_LOWER:
6874 case BUILT_IN_CHKP_EXTRACT_UPPER:
6875 /* Software implementation of Pointer Bounds Checker is NYI.
6876 Target support is required. */
6877 error ("Your target platform does not support -fcheck-pointer-bounds");
6878 break;
6880 case BUILT_IN_ACC_ON_DEVICE:
6881 /* Do library call, if we failed to expand the builtin when
6882 folding. */
6883 break;
6885 default: /* just do library call, if unknown builtin */
6886 break;
6889 /* The switch statement above can drop through to cause the function
6890 to be called normally. */
6891 return expand_call (exp, target, ignore);
6894 /* Similar to expand_builtin but is used for instrumented calls. */
6897 expand_builtin_with_bounds (tree exp, rtx target,
6898 rtx subtarget ATTRIBUTE_UNUSED,
6899 machine_mode mode, int ignore)
6901 tree fndecl = get_callee_fndecl (exp);
6902 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6904 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6906 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6907 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6909 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6910 && fcode < END_CHKP_BUILTINS);
6912 switch (fcode)
6914 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6915 target = expand_builtin_memcpy_with_bounds (exp, target);
6916 if (target)
6917 return target;
6918 break;
6920 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6921 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6922 if (target)
6923 return target;
6924 break;
6926 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6927 target = expand_builtin_memset_with_bounds (exp, target, mode);
6928 if (target)
6929 return target;
6930 break;
6932 default:
6933 break;
6936 /* The switch statement above can drop through to cause the function
6937 to be called normally. */
6938 return expand_call (exp, target, ignore);
6941 /* Determine whether a tree node represents a call to a built-in
6942 function. If the tree T is a call to a built-in function with
6943 the right number of arguments of the appropriate types, return
6944 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6945 Otherwise the return value is END_BUILTINS. */
6947 enum built_in_function
6948 builtin_mathfn_code (const_tree t)
6950 const_tree fndecl, arg, parmlist;
6951 const_tree argtype, parmtype;
6952 const_call_expr_arg_iterator iter;
6954 if (TREE_CODE (t) != CALL_EXPR
6955 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6956 return END_BUILTINS;
6958 fndecl = get_callee_fndecl (t);
6959 if (fndecl == NULL_TREE
6960 || TREE_CODE (fndecl) != FUNCTION_DECL
6961 || ! DECL_BUILT_IN (fndecl)
6962 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6963 return END_BUILTINS;
6965 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6966 init_const_call_expr_arg_iterator (t, &iter);
6967 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6969 /* If a function doesn't take a variable number of arguments,
6970 the last element in the list will have type `void'. */
6971 parmtype = TREE_VALUE (parmlist);
6972 if (VOID_TYPE_P (parmtype))
6974 if (more_const_call_expr_args_p (&iter))
6975 return END_BUILTINS;
6976 return DECL_FUNCTION_CODE (fndecl);
6979 if (! more_const_call_expr_args_p (&iter))
6980 return END_BUILTINS;
6982 arg = next_const_call_expr_arg (&iter);
6983 argtype = TREE_TYPE (arg);
6985 if (SCALAR_FLOAT_TYPE_P (parmtype))
6987 if (! SCALAR_FLOAT_TYPE_P (argtype))
6988 return END_BUILTINS;
6990 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6992 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6993 return END_BUILTINS;
6995 else if (POINTER_TYPE_P (parmtype))
6997 if (! POINTER_TYPE_P (argtype))
6998 return END_BUILTINS;
7000 else if (INTEGRAL_TYPE_P (parmtype))
7002 if (! INTEGRAL_TYPE_P (argtype))
7003 return END_BUILTINS;
7005 else
7006 return END_BUILTINS;
7009 /* Variable-length argument list. */
7010 return DECL_FUNCTION_CODE (fndecl);
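/* For example, a well-formed call to sqrt with a floating-point argument
   yields BUILT_IN_SQRT here, whereas a call whose argument types do not
   match the builtin's prototype (say, a pointer where a float is
   expected) yields END_BUILTINS.  */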
7013 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7014 evaluate to a constant. */
7016 static tree
7017 fold_builtin_constant_p (tree arg)
7019 /* We return 1 for a numeric type that's known to be a constant
7020 value at compile-time or for an aggregate type that's a
7021 literal constant. */
7022 STRIP_NOPS (arg);
7024 /* If we know this is a constant, emit the constant of one. */
7025 if (CONSTANT_CLASS_P (arg)
7026 || (TREE_CODE (arg) == CONSTRUCTOR
7027 && TREE_CONSTANT (arg)))
7028 return integer_one_node;
7029 if (TREE_CODE (arg) == ADDR_EXPR)
7031 tree op = TREE_OPERAND (arg, 0);
7032 if (TREE_CODE (op) == STRING_CST
7033 || (TREE_CODE (op) == ARRAY_REF
7034 && integer_zerop (TREE_OPERAND (op, 1))
7035 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7036 return integer_one_node;
7039 /* If this expression has side effects, show we don't know it to be a
7040 constant. Likewise if it's a pointer or aggregate type since in
7041 those case we only want literals, since those are only optimized
7042 when generating RTL, not later.
7043 And finally, if we are compiling an initializer, not code, we
7044 need to return a definite result now; there's not going to be any
7045 more optimization done. */
7046 if (TREE_SIDE_EFFECTS (arg)
7047 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7048 || POINTER_TYPE_P (TREE_TYPE (arg))
7049 || cfun == 0
7050 || folding_initializer
7051 || force_folding_builtin_constant_p)
7052 return integer_zero_node;
7054 return NULL_TREE;
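/* For example, __builtin_constant_p (3 * 7) and __builtin_constant_p of
   the address of a string literal fold to 1; an argument with side
   effects or of pointer or aggregate type folds to 0, as does any
   remaining call once no further optimization will run; otherwise
   NULL_TREE is returned so later passes may still resolve it.  */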
7057 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7058 return it as a truthvalue. */
7060 static tree
7061 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7062 tree predictor)
7064 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7066 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7067 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7068 ret_type = TREE_TYPE (TREE_TYPE (fn));
7069 pred_type = TREE_VALUE (arg_types);
7070 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7072 pred = fold_convert_loc (loc, pred_type, pred);
7073 expected = fold_convert_loc (loc, expected_type, expected);
7074 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7075 predictor);
7077 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7078 build_int_cst (ret_type, 0));
7081 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7082 NULL_TREE if no simplification is possible. */
7084 tree
7085 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7087 tree inner, fndecl, inner_arg0;
7088 enum tree_code code;
7090 /* Distribute the expected value over short-circuiting operators.
7091 See through the cast from truthvalue_type_node to long. */
7092 inner_arg0 = arg0;
7093 while (CONVERT_EXPR_P (inner_arg0)
7094 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7095 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7096 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7098 /* If this is a builtin_expect within a builtin_expect keep the
7099 inner one. See through a comparison against a constant. It
7100 might have been added to create a truthvalue. */
7101 inner = inner_arg0;
7103 if (COMPARISON_CLASS_P (inner)
7104 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7105 inner = TREE_OPERAND (inner, 0);
7107 if (TREE_CODE (inner) == CALL_EXPR
7108 && (fndecl = get_callee_fndecl (inner))
7109 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7110 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7111 return arg0;
7113 inner = inner_arg0;
7114 code = TREE_CODE (inner);
7115 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7117 tree op0 = TREE_OPERAND (inner, 0);
7118 tree op1 = TREE_OPERAND (inner, 1);
7120 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7121 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7122 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7124 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7127 /* If the argument isn't invariant then there's nothing else we can do. */
7128 if (!TREE_CONSTANT (inner_arg0))
7129 return NULL_TREE;
7131 /* If we expect that a comparison against the argument will fold to
7132 a constant return the constant. In practice, this means a true
7133 constant or the address of a non-weak symbol. */
7134 inner = inner_arg0;
7135 STRIP_NOPS (inner);
7136 if (TREE_CODE (inner) == ADDR_EXPR)
7140 inner = TREE_OPERAND (inner, 0);
7142 while (TREE_CODE (inner) == COMPONENT_REF
7143 || TREE_CODE (inner) == ARRAY_REF);
7144 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7145 return NULL_TREE;
7148 /* Otherwise, ARG0 already has the proper type for the return value. */
7149 return arg0;
7152 /* Fold a call to __builtin_classify_type with argument ARG. */
7154 static tree
7155 fold_builtin_classify_type (tree arg)
7157 if (arg == 0)
7158 return build_int_cst (integer_type_node, no_type_class);
7160 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7163 /* Fold a call to __builtin_strlen with argument ARG. */
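/* For example, a constant argument such as strlen ("hello") is folded via
   c_strlen to the integer constant 5, converted to the call's return type;
   non-constant arguments are left alone.  */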
7165 static tree
7166 fold_builtin_strlen (location_t loc, tree type, tree arg)
7168 if (!validate_arg (arg, POINTER_TYPE))
7169 return NULL_TREE;
7170 else
7172 tree len = c_strlen (arg, 0);
7174 if (len)
7175 return fold_convert_loc (loc, type, len);
7177 return NULL_TREE;
7181 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7183 static tree
7184 fold_builtin_inf (location_t loc, tree type, int warn)
7186 REAL_VALUE_TYPE real;
7188 /* __builtin_inff is intended to be usable to define INFINITY on all
7189 targets. If an infinity is not available, INFINITY expands "to a
7190 positive constant of type float that overflows at translation
7191 time", footnote "In this case, using INFINITY will violate the
7192 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7193 Thus we pedwarn to ensure this constraint violation is
7194 diagnosed. */
7195 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7196 pedwarn (loc, 0, "target format does not support infinity");
7198 real_inf (&real);
7199 return build_real (type, real);
7202 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7203 NULL_TREE if no simplification can be made. */
7205 static tree
7206 fold_builtin_sincos (location_t loc,
7207 tree arg0, tree arg1, tree arg2)
7209 tree type;
7210 tree fndecl, call = NULL_TREE;
7212 if (!validate_arg (arg0, REAL_TYPE)
7213 || !validate_arg (arg1, POINTER_TYPE)
7214 || !validate_arg (arg2, POINTER_TYPE))
7215 return NULL_TREE;
7217 type = TREE_TYPE (arg0);
7219 /* Calculate the result when the argument is a constant. */
7220 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7221 if (fn == END_BUILTINS)
7222 return NULL_TREE;
7224 /* Canonicalize sincos to cexpi. */
7225 if (TREE_CODE (arg0) == REAL_CST)
7227 tree complex_type = build_complex_type (type);
7228 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7230 if (!call)
7232 if (!targetm.libc_has_function (function_c99_math_complex)
7233 || !builtin_decl_implicit_p (fn))
7234 return NULL_TREE;
7235 fndecl = builtin_decl_explicit (fn);
7236 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7237 call = builtin_save_expr (call);
7240 return build2 (COMPOUND_EXPR, void_type_node,
7241 build2 (MODIFY_EXPR, void_type_node,
7242 build_fold_indirect_ref_loc (loc, arg1),
7243 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7244 build2 (MODIFY_EXPR, void_type_node,
7245 build_fold_indirect_ref_loc (loc, arg2),
7246 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7249 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7250 arguments to the call, and TYPE is its return type.
7251 Return NULL_TREE if no simplification can be made. */
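/* As an illustration, memchr ("hello", 'l', 5) folds to a pointer two bytes
   past the start of the string constant, while a character that does not
   occur within the first LEN bytes folds to a null pointer of ARG1's type.  */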
7253 static tree
7254 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7256 if (!validate_arg (arg1, POINTER_TYPE)
7257 || !validate_arg (arg2, INTEGER_TYPE)
7258 || !validate_arg (len, INTEGER_TYPE))
7259 return NULL_TREE;
7260 else
7262 const char *p1;
7264 if (TREE_CODE (arg2) != INTEGER_CST
7265 || !tree_fits_uhwi_p (len))
7266 return NULL_TREE;
7268 p1 = c_getstr (arg1);
7269 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7271 char c;
7272 const char *r;
7273 tree tem;
7275 if (target_char_cast (arg2, &c))
7276 return NULL_TREE;
7278 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7280 if (r == NULL)
7281 return build_int_cst (TREE_TYPE (arg1), 0);
7283 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7284 return fold_convert_loc (loc, type, tem);
7286 return NULL_TREE;
7290 /* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7291 Return NULL_TREE if no simplification can be made. */
7293 static tree
7294 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7296 if (!validate_arg (arg1, POINTER_TYPE)
7297 || !validate_arg (arg2, POINTER_TYPE)
7298 || !validate_arg (len, INTEGER_TYPE))
7299 return NULL_TREE;
7301 /* If the LEN parameter is zero, return zero. */
7302 if (integer_zerop (len))
7303 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7304 arg1, arg2);
7306 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7307 if (operand_equal_p (arg1, arg2, 0))
7308 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7310 /* If the LEN parameter is one, return an expression corresponding to
7311 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7312 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7314 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7315 tree cst_uchar_ptr_node
7316 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7318 tree ind1
7319 = fold_convert_loc (loc, integer_type_node,
7320 build1 (INDIRECT_REF, cst_uchar_node,
7321 fold_convert_loc (loc,
7322 cst_uchar_ptr_node,
7323 arg1)));
7324 tree ind2
7325 = fold_convert_loc (loc, integer_type_node,
7326 build1 (INDIRECT_REF, cst_uchar_node,
7327 fold_convert_loc (loc,
7328 cst_uchar_ptr_node,
7329 arg2)));
7330 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7333 return NULL_TREE;
7336 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7337 Return NULL_TREE if no simplification can be made. */
7339 static tree
7340 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7342 if (!validate_arg (arg1, POINTER_TYPE)
7343 || !validate_arg (arg2, POINTER_TYPE))
7344 return NULL_TREE;
7346 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7347 if (operand_equal_p (arg1, arg2, 0))
7348 return integer_zero_node;
7350 /* If the second arg is "", return *(const unsigned char*)arg1. */
7351 const char *p2 = c_getstr (arg2);
7352 if (p2 && *p2 == '\0')
7354 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7355 tree cst_uchar_ptr_node
7356 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7358 return fold_convert_loc (loc, integer_type_node,
7359 build1 (INDIRECT_REF, cst_uchar_node,
7360 fold_convert_loc (loc,
7361 cst_uchar_ptr_node,
7362 arg1)));
7365 /* If the first arg is "", return -*(const unsigned char*)arg2. */
7366 const char *p1 = c_getstr (arg1);
7367 if (p1 && *p1 == '\0')
7369 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7370 tree cst_uchar_ptr_node
7371 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7373 tree temp
7374 = fold_convert_loc (loc, integer_type_node,
7375 build1 (INDIRECT_REF, cst_uchar_node,
7376 fold_convert_loc (loc,
7377 cst_uchar_ptr_node,
7378 arg2)));
7379 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7382 return NULL_TREE;
7385 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7386 Return NULL_TREE if no simplification can be made. */
7388 static tree
7389 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7391 if (!validate_arg (arg1, POINTER_TYPE)
7392 || !validate_arg (arg2, POINTER_TYPE)
7393 || !validate_arg (len, INTEGER_TYPE))
7394 return NULL_TREE;
7396 /* If the LEN parameter is zero, return zero. */
7397 if (integer_zerop (len))
7398 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7399 arg1, arg2);
7401 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7402 if (operand_equal_p (arg1, arg2, 0))
7403 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7405 /* If the second arg is "", and the length is greater than zero,
7406 return *(const unsigned char*)arg1. */
7407 const char *p2 = c_getstr (arg2);
7408 if (p2 && *p2 == '\0'
7409 && TREE_CODE (len) == INTEGER_CST
7410 && tree_int_cst_sgn (len) == 1)
7412 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7413 tree cst_uchar_ptr_node
7414 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7416 return fold_convert_loc (loc, integer_type_node,
7417 build1 (INDIRECT_REF, cst_uchar_node,
7418 fold_convert_loc (loc,
7419 cst_uchar_ptr_node,
7420 arg1)));
7423 /* If the first arg is "", and the length is greater than zero,
7424 return -*(const unsigned char*)arg2. */
7425 const char *p1 = c_getstr (arg1);
7426 if (p1 && *p1 == '\0'
7427 && TREE_CODE (len) == INTEGER_CST
7428 && tree_int_cst_sgn (len) == 1)
7430 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7431 tree cst_uchar_ptr_node
7432 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7434 tree temp = fold_convert_loc (loc, integer_type_node,
7435 build1 (INDIRECT_REF, cst_uchar_node,
7436 fold_convert_loc (loc,
7437 cst_uchar_ptr_node,
7438 arg2)));
7439 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7442 /* If the LEN parameter is one, return an expression corresponding to
7443 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
7444 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7446 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7447 tree cst_uchar_ptr_node
7448 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7450 tree ind1 = fold_convert_loc (loc, integer_type_node,
7451 build1 (INDIRECT_REF, cst_uchar_node,
7452 fold_convert_loc (loc,
7453 cst_uchar_ptr_node,
7454 arg1)));
7455 tree ind2 = fold_convert_loc (loc, integer_type_node,
7456 build1 (INDIRECT_REF, cst_uchar_node,
7457 fold_convert_loc (loc,
7458 cst_uchar_ptr_node,
7459 arg2)));
7460 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7463 return NULL_TREE;
7466 /* Fold a call to builtin isascii with argument ARG. */
7468 static tree
7469 fold_builtin_isascii (location_t loc, tree arg)
7471 if (!validate_arg (arg, INTEGER_TYPE))
7472 return NULL_TREE;
7473 else
7475 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
7476 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7477 build_int_cst (integer_type_node,
7478 ~ (unsigned HOST_WIDE_INT) 0x7f));
7479 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7480 arg, integer_zero_node);
7484 /* Fold a call to builtin toascii with argument ARG. */
7486 static tree
7487 fold_builtin_toascii (location_t loc, tree arg)
7489 if (!validate_arg (arg, INTEGER_TYPE))
7490 return NULL_TREE;
7492 /* Transform toascii(c) -> (c & 0x7f). */
7493 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7494 build_int_cst (integer_type_node, 0x7f));
7497 /* Fold a call to builtin isdigit with argument ARG. */
7499 static tree
7500 fold_builtin_isdigit (location_t loc, tree arg)
7502 if (!validate_arg (arg, INTEGER_TYPE))
7503 return NULL_TREE;
7504 else
7506 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
7507 /* According to the C standard, isdigit is unaffected by locale.
7508 However, it definitely is affected by the target character set. */
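/* For example, with an ASCII execution character set where '0' is 48,
   isdigit (c) becomes (unsigned) c - 48 <= 9.  */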
7509 unsigned HOST_WIDE_INT target_digit0
7510 = lang_hooks.to_target_charset ('0');
7512 if (target_digit0 == 0)
7513 return NULL_TREE;
7515 arg = fold_convert_loc (loc, unsigned_type_node, arg);
7516 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7517 build_int_cst (unsigned_type_node, target_digit0));
7518 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7519 build_int_cst (unsigned_type_node, 9));
7523 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
7525 static tree
7526 fold_builtin_fabs (location_t loc, tree arg, tree type)
7528 if (!validate_arg (arg, REAL_TYPE))
7529 return NULL_TREE;
7531 arg = fold_convert_loc (loc, type, arg);
7532 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7535 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
7537 static tree
7538 fold_builtin_abs (location_t loc, tree arg, tree type)
7540 if (!validate_arg (arg, INTEGER_TYPE))
7541 return NULL_TREE;
7543 arg = fold_convert_loc (loc, type, arg);
7544 return fold_build1_loc (loc, ABS_EXPR, type, arg);
7547 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7549 static tree
7550 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7552 /* ??? Only expand to FMA_EXPR if it's directly supported. */
7553 if (validate_arg (arg0, REAL_TYPE)
7554 && validate_arg (arg1, REAL_TYPE)
7555 && validate_arg (arg2, REAL_TYPE)
7556 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7557 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7559 return NULL_TREE;
7562 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7564 static tree
7565 fold_builtin_carg (location_t loc, tree arg, tree type)
7567 if (validate_arg (arg, COMPLEX_TYPE)
7568 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7570 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7572 if (atan2_fn)
7574 tree new_arg = builtin_save_expr (arg);
7575 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7576 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7577 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7581 return NULL_TREE;
7584 /* Fold a call to builtin frexp; we can assume the base is 2. */
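/* For example, for the constant argument 8.0 (0.5 * 2**4) the fold yields
   (*arg1 = 4, 0.5), since GCC's normalized significands already lie in
   [0.5, 1.0).  */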
7586 static tree
7587 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7589 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7590 return NULL_TREE;
7592 STRIP_NOPS (arg0);
7594 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7595 return NULL_TREE;
7597 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7599 /* Proceed if a valid pointer type was passed in. */
7600 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7602 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7603 tree frac, exp;
7605 switch (value->cl)
7607 case rvc_zero:
7608 /* For +-0, return (*exp = 0, +-0). */
7609 exp = integer_zero_node;
7610 frac = arg0;
7611 break;
7612 case rvc_nan:
7613 case rvc_inf:
7614 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
7615 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7616 case rvc_normal:
7618 /* Since the frexp function always expects base 2, and in
7619 GCC normalized significands are already in the range
7620 [0.5, 1.0), we have exactly what frexp wants. */
7621 REAL_VALUE_TYPE frac_rvt = *value;
7622 SET_REAL_EXP (&frac_rvt, 0);
7623 frac = build_real (rettype, frac_rvt);
7624 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7626 break;
7627 default:
7628 gcc_unreachable ();
7631 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7632 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7633 TREE_SIDE_EFFECTS (arg1) = 1;
7634 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7637 return NULL_TREE;
7640 /* Fold a call to builtin modf. */
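/* For example, modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5); a negative
   integral argument such as -3.0 yields a fractional part of -0.0.  */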
7642 static tree
7643 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7645 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7646 return NULL_TREE;
7648 STRIP_NOPS (arg0);
7650 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7651 return NULL_TREE;
7653 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7655 /* Proceed if a valid pointer type was passed in. */
7656 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7658 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7659 REAL_VALUE_TYPE trunc, frac;
7661 switch (value->cl)
7663 case rvc_nan:
7664 case rvc_zero:
7665 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7666 trunc = frac = *value;
7667 break;
7668 case rvc_inf:
7669 /* For +-Inf, return (*arg1 = arg0, +-0). */
7670 frac = dconst0;
7671 frac.sign = value->sign;
7672 trunc = *value;
7673 break;
7674 case rvc_normal:
7675 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7676 real_trunc (&trunc, VOIDmode, value);
7677 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7678 /* If the original number was negative and already
7679 integral, then the fractional part is -0.0. */
7680 if (value->sign && frac.cl == rvc_zero)
7681 frac.sign = value->sign;
7682 break;
7685 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7686 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7687 build_real (rettype, trunc));
7688 TREE_SIDE_EFFECTS (arg1) = 1;
7689 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7690 build_real (rettype, frac));
7693 return NULL_TREE;
7696 /* Given a location LOC, an interclass builtin function decl FNDECL
7697 and its single argument ARG, return a folded expression computing
7698 the same, or NULL_TREE if we either couldn't or didn't want to fold
7699 (the latter happens if there's an RTL instruction available). */
7701 static tree
7702 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7704 machine_mode mode;
7706 if (!validate_arg (arg, REAL_TYPE))
7707 return NULL_TREE;
7709 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7710 return NULL_TREE;
7712 mode = TYPE_MODE (TREE_TYPE (arg));
7714 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7716 /* If there is no optab, try generic code. */
7717 switch (DECL_FUNCTION_CODE (fndecl))
7719 tree result;
7721 CASE_FLT_FN (BUILT_IN_ISINF):
7723 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
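/* Using isgreater rather than a plain > keeps the comparison quiet: a NaN
   argument yields 0, matching isinf, without raising an invalid-operation
   exception.  */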
7724 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7725 tree type = TREE_TYPE (arg);
7726 REAL_VALUE_TYPE r;
7727 char buf[128];
7729 if (is_ibm_extended)
7731 /* NaN and Inf are encoded in the high-order double value
7732 only. The low-order value is not significant. */
7733 type = double_type_node;
7734 mode = DFmode;
7735 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7737 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7738 real_from_string (&r, buf);
7739 result = build_call_expr (isgr_fn, 2,
7740 fold_build1_loc (loc, ABS_EXPR, type, arg),
7741 build_real (type, r));
7742 return result;
7744 CASE_FLT_FN (BUILT_IN_FINITE):
7745 case BUILT_IN_ISFINITE:
7747 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
7748 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7749 tree type = TREE_TYPE (arg);
7750 REAL_VALUE_TYPE r;
7751 char buf[128];
7753 if (is_ibm_extended)
7755 /* NaN and Inf are encoded in the high-order double value
7756 only. The low-order value is not significant. */
7757 type = double_type_node;
7758 mode = DFmode;
7759 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7761 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7762 real_from_string (&r, buf);
7763 result = build_call_expr (isle_fn, 2,
7764 fold_build1_loc (loc, ABS_EXPR, type, arg),
7765 build_real (type, r));
7766 /*result = fold_build2_loc (loc, UNGT_EXPR,
7767 TREE_TYPE (TREE_TYPE (fndecl)),
7768 fold_build1_loc (loc, ABS_EXPR, type, arg),
7769 build_real (type, r));
7770 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7771 TREE_TYPE (TREE_TYPE (fndecl)),
7772 result);*/
7773 return result;
7775 case BUILT_IN_ISNORMAL:
7777 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7778 islessequal(fabs(x),DBL_MAX). */
7779 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7780 tree type = TREE_TYPE (arg);
7781 tree orig_arg, max_exp, min_exp;
7782 machine_mode orig_mode = mode;
7783 REAL_VALUE_TYPE rmax, rmin;
7784 char buf[128];
7786 orig_arg = arg = builtin_save_expr (arg);
7787 if (is_ibm_extended)
7789 /* Use double to test the normal range of IBM extended
7790 precision. Emin for IBM extended precision is
7791 different to emin for IEEE double, being 53 higher
7792 since the low double exponent is at least 53 lower
7793 than the high double exponent. */
7794 type = double_type_node;
7795 mode = DFmode;
7796 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7798 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7800 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7801 real_from_string (&rmax, buf);
7802 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7803 real_from_string (&rmin, buf);
7804 max_exp = build_real (type, rmax);
7805 min_exp = build_real (type, rmin);
7807 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7808 if (is_ibm_extended)
7810 /* Testing the high end of the range is done just using
7811 the high double, using the same test as isfinite().
7812 For the subnormal end of the range we first test the
7813 high double, then if its magnitude is equal to the
7814 limit of 0x1p-969, we test whether the low double is
7815 non-zero and opposite sign to the high double. */
7816 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7817 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7818 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7819 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7820 arg, min_exp);
7821 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7822 complex_double_type_node, orig_arg);
7823 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7824 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7825 tree zero = build_real (type, dconst0);
7826 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7827 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7828 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7829 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7830 fold_build3 (COND_EXPR,
7831 integer_type_node,
7832 hilt, logt, lolt));
7833 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7834 eq_min, ok_lo);
7835 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7836 gt_min, eq_min);
7838 else
7840 tree const isge_fn
7841 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7842 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7844 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7845 max_exp, min_exp);
7846 return result;
7848 default:
7849 break;
7852 return NULL_TREE;
7855 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
7856 ARG is the argument for the call. */
7858 static tree
7859 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7861 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7863 if (!validate_arg (arg, REAL_TYPE))
7864 return NULL_TREE;
7866 switch (builtin_index)
7868 case BUILT_IN_ISINF:
7869 if (!HONOR_INFINITIES (arg))
7870 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7872 return NULL_TREE;
7874 case BUILT_IN_ISINF_SIGN:
7876 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7877 /* In a boolean context, GCC will fold the inner COND_EXPR to
7878 1. So e.g. "if (isinf_sign(x))" would be folded to just
7879 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7880 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
7881 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7882 tree tmp = NULL_TREE;
7884 arg = builtin_save_expr (arg);
7886 if (signbit_fn && isinf_fn)
7888 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7889 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7891 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7892 signbit_call, integer_zero_node);
7893 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7894 isinf_call, integer_zero_node);
7896 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7897 integer_minus_one_node, integer_one_node);
7898 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7899 isinf_call, tmp,
7900 integer_zero_node);
7903 return tmp;
7906 case BUILT_IN_ISFINITE:
7907 if (!HONOR_NANS (arg)
7908 && !HONOR_INFINITIES (arg))
7909 return omit_one_operand_loc (loc, type, integer_one_node, arg);
7911 return NULL_TREE;
7913 case BUILT_IN_ISNAN:
7914 if (!HONOR_NANS (arg))
7915 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7918 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7919 if (is_ibm_extended)
7921 /* NaN and Inf are encoded in the high-order double value
7922 only. The low-order value is not significant. */
7923 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7926 arg = builtin_save_expr (arg);
7927 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7929 default:
7930 gcc_unreachable ();
7934 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7935 This builtin will generate code to return the appropriate floating
7936 point classification depending on the value of the floating point
7937 number passed in. The possible return values must be supplied as
7938 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7939 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
7940 one floating point argument which is "type generic". */
7942 static tree
7943 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7945 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7946 arg, type, res, tmp;
7947 machine_mode mode;
7948 REAL_VALUE_TYPE r;
7949 char buf[128];
7951 /* Verify the required arguments in the original call. */
7952 if (nargs != 6
7953 || !validate_arg (args[0], INTEGER_TYPE)
7954 || !validate_arg (args[1], INTEGER_TYPE)
7955 || !validate_arg (args[2], INTEGER_TYPE)
7956 || !validate_arg (args[3], INTEGER_TYPE)
7957 || !validate_arg (args[4], INTEGER_TYPE)
7958 || !validate_arg (args[5], REAL_TYPE))
7959 return NULL_TREE;
7961 fp_nan = args[0];
7962 fp_infinite = args[1];
7963 fp_normal = args[2];
7964 fp_subnormal = args[3];
7965 fp_zero = args[4];
7966 arg = args[5];
7967 type = TREE_TYPE (arg);
7968 mode = TYPE_MODE (type);
7969 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7971 /* fpclassify(x) ->
7972 isnan(x) ? FP_NAN :
7973 (fabs(x) == Inf ? FP_INFINITE :
7974 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7975 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
7977 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7978 build_real (type, dconst0));
7979 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7980 tmp, fp_zero, fp_subnormal);
7982 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7983 real_from_string (&r, buf);
7984 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7985 arg, build_real (type, r));
7986 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7988 if (HONOR_INFINITIES (mode))
7990 real_inf (&r);
7991 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7992 build_real (type, r));
7993 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7994 fp_infinite, res);
7997 if (HONOR_NANS (mode))
7999 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8000 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8003 return res;
8006 /* Fold a call to an unordered comparison function such as
8007 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8008 being called and ARG0 and ARG1 are the arguments for the call.
8009 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8010 the opposite of the desired result. UNORDERED_CODE is used
8011 for modes that can hold NaNs and ORDERED_CODE is used for
8012 the rest. */
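/* For example, __builtin_isgreater (x, y) is folded to a TRUTH_NOT_EXPR of
   an UNLE_EXPR, i.e. ! (x unle y), when NaNs are honored, and to ! (x <= y)
   otherwise.  */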
8014 static tree
8015 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8016 enum tree_code unordered_code,
8017 enum tree_code ordered_code)
8019 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8020 enum tree_code code;
8021 tree type0, type1;
8022 enum tree_code code0, code1;
8023 tree cmp_type = NULL_TREE;
8025 type0 = TREE_TYPE (arg0);
8026 type1 = TREE_TYPE (arg1);
8028 code0 = TREE_CODE (type0);
8029 code1 = TREE_CODE (type1);
8031 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8032 /* Choose the wider of two real types. */
8033 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8034 ? type0 : type1;
8035 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8036 cmp_type = type0;
8037 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8038 cmp_type = type1;
8040 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8041 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8043 if (unordered_code == UNORDERED_EXPR)
8045 if (!HONOR_NANS (arg0))
8046 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8047 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8050 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8051 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8052 fold_build2_loc (loc, code, type, arg0, arg1));
8055 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8056 arithmetic if it can never overflow, or into internal functions that
8057 return both the result of the arithmetic and an overflow flag in
8058 a complex integer result, or some other check for overflow.
8059 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8060 checking part of that. */
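/* As a sketch, __builtin_add_overflow (a, b, &r) becomes roughly
   c = .ADD_OVERFLOW (a, b); r = REALPART_EXPR (c); with IMAGPART_EXPR (c),
   converted to bool, as the value of the whole expression, while
   __builtin_add_overflow_p with constant operands folds directly to a
   boolean constant.  */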
8062 static tree
8063 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8064 tree arg0, tree arg1, tree arg2)
8066 enum internal_fn ifn = IFN_LAST;
8067 /* The code of the expression corresponding to the type-generic
8068 built-in, or ERROR_MARK for the type-specific ones. */
8069 enum tree_code opcode = ERROR_MARK;
8070 bool ovf_only = false;
8072 switch (fcode)
8074 case BUILT_IN_ADD_OVERFLOW_P:
8075 ovf_only = true;
8076 /* FALLTHRU */
8077 case BUILT_IN_ADD_OVERFLOW:
8078 opcode = PLUS_EXPR;
8079 /* FALLTHRU */
8080 case BUILT_IN_SADD_OVERFLOW:
8081 case BUILT_IN_SADDL_OVERFLOW:
8082 case BUILT_IN_SADDLL_OVERFLOW:
8083 case BUILT_IN_UADD_OVERFLOW:
8084 case BUILT_IN_UADDL_OVERFLOW:
8085 case BUILT_IN_UADDLL_OVERFLOW:
8086 ifn = IFN_ADD_OVERFLOW;
8087 break;
8088 case BUILT_IN_SUB_OVERFLOW_P:
8089 ovf_only = true;
8090 /* FALLTHRU */
8091 case BUILT_IN_SUB_OVERFLOW:
8092 opcode = MINUS_EXPR;
8093 /* FALLTHRU */
8094 case BUILT_IN_SSUB_OVERFLOW:
8095 case BUILT_IN_SSUBL_OVERFLOW:
8096 case BUILT_IN_SSUBLL_OVERFLOW:
8097 case BUILT_IN_USUB_OVERFLOW:
8098 case BUILT_IN_USUBL_OVERFLOW:
8099 case BUILT_IN_USUBLL_OVERFLOW:
8100 ifn = IFN_SUB_OVERFLOW;
8101 break;
8102 case BUILT_IN_MUL_OVERFLOW_P:
8103 ovf_only = true;
8104 /* FALLTHRU */
8105 case BUILT_IN_MUL_OVERFLOW:
8106 opcode = MULT_EXPR;
8107 /* FALLTHRU */
8108 case BUILT_IN_SMUL_OVERFLOW:
8109 case BUILT_IN_SMULL_OVERFLOW:
8110 case BUILT_IN_SMULLL_OVERFLOW:
8111 case BUILT_IN_UMUL_OVERFLOW:
8112 case BUILT_IN_UMULL_OVERFLOW:
8113 case BUILT_IN_UMULLL_OVERFLOW:
8114 ifn = IFN_MUL_OVERFLOW;
8115 break;
8116 default:
8117 gcc_unreachable ();
8120 /* For the "generic" overloads, the first two arguments can have different
8121 types and the last argument determines the target type to use to check
8122 for overflow. The arguments of the other overloads all have the same
8123 type. */
8124 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8126 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8127 arguments are constant, attempt to fold the built-in call into a constant
8128 expression indicating whether or not it detected an overflow. */
8129 if (ovf_only
8130 && TREE_CODE (arg0) == INTEGER_CST
8131 && TREE_CODE (arg1) == INTEGER_CST)
8132 /* Perform the computation in the target type and check for overflow. */
8133 return omit_one_operand_loc (loc, boolean_type_node,
8134 arith_overflowed_p (opcode, type, arg0, arg1)
8135 ? boolean_true_node : boolean_false_node,
8136 arg2);
8138 tree ctype = build_complex_type (type);
8139 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8140 2, arg0, arg1);
8141 tree tgt = save_expr (call);
8142 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8143 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8144 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8146 if (ovf_only)
8147 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8149 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8150 tree store
8151 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8152 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8155 /* Fold a call to __builtin_FILE to a constant string. */
8157 static inline tree
8158 fold_builtin_FILE (location_t loc)
8160 if (const char *fname = LOCATION_FILE (loc))
8161 return build_string_literal (strlen (fname) + 1, fname);
8163 return build_string_literal (1, "");
8166 /* Fold a call to __builtin_FUNCTION to a constant string. */
8168 static inline tree
8169 fold_builtin_FUNCTION ()
8171 if (current_function_decl)
8173 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8174 return build_string_literal (strlen (name) + 1, name);
8177 return build_string_literal (1, "");
8180 /* Fold a call to __builtin_LINE to an integer constant. */
8182 static inline tree
8183 fold_builtin_LINE (location_t loc, tree type)
8185 return build_int_cst (type, LOCATION_LINE (loc));
8188 /* Fold a call to built-in function FNDECL with 0 arguments.
8189 This function returns NULL_TREE if no simplification was possible. */
8191 static tree
8192 fold_builtin_0 (location_t loc, tree fndecl)
8194 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8195 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8196 switch (fcode)
8198 case BUILT_IN_FILE:
8199 return fold_builtin_FILE (loc);
8201 case BUILT_IN_FUNCTION:
8202 return fold_builtin_FUNCTION ();
8204 case BUILT_IN_LINE:
8205 return fold_builtin_LINE (loc, type);
8207 CASE_FLT_FN (BUILT_IN_INF):
8208 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8209 case BUILT_IN_INFD32:
8210 case BUILT_IN_INFD64:
8211 case BUILT_IN_INFD128:
8212 return fold_builtin_inf (loc, type, true);
8214 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8215 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8216 return fold_builtin_inf (loc, type, false);
8218 case BUILT_IN_CLASSIFY_TYPE:
8219 return fold_builtin_classify_type (NULL_TREE);
8221 default:
8222 break;
8224 return NULL_TREE;
8227 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8228 This function returns NULL_TREE if no simplification was possible. */
8230 static tree
8231 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8233 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8234 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8236 if (TREE_CODE (arg0) == ERROR_MARK)
8237 return NULL_TREE;
8239 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8240 return ret;
8242 switch (fcode)
8244 case BUILT_IN_CONSTANT_P:
8246 tree val = fold_builtin_constant_p (arg0);
8248 /* Gimplification will pull the CALL_EXPR for the builtin out of
8249 an if condition. When not optimizing, we'll not CSE it back.
8250 To avoid regressions such as link errors, return false now. */
8251 if (!val && !optimize)
8252 val = integer_zero_node;
8254 return val;
8257 case BUILT_IN_CLASSIFY_TYPE:
8258 return fold_builtin_classify_type (arg0);
8260 case BUILT_IN_STRLEN:
8261 return fold_builtin_strlen (loc, type, arg0);
8263 CASE_FLT_FN (BUILT_IN_FABS):
8264 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8265 case BUILT_IN_FABSD32:
8266 case BUILT_IN_FABSD64:
8267 case BUILT_IN_FABSD128:
8268 return fold_builtin_fabs (loc, arg0, type);
8270 case BUILT_IN_ABS:
8271 case BUILT_IN_LABS:
8272 case BUILT_IN_LLABS:
8273 case BUILT_IN_IMAXABS:
8274 return fold_builtin_abs (loc, arg0, type);
8276 CASE_FLT_FN (BUILT_IN_CONJ):
8277 if (validate_arg (arg0, COMPLEX_TYPE)
8278 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8279 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8280 break;
8282 CASE_FLT_FN (BUILT_IN_CREAL):
8283 if (validate_arg (arg0, COMPLEX_TYPE)
8284 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8285 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8286 break;
8288 CASE_FLT_FN (BUILT_IN_CIMAG):
8289 if (validate_arg (arg0, COMPLEX_TYPE)
8290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8291 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8292 break;
8294 CASE_FLT_FN (BUILT_IN_CARG):
8295 return fold_builtin_carg (loc, arg0, type);
8297 case BUILT_IN_ISASCII:
8298 return fold_builtin_isascii (loc, arg0);
8300 case BUILT_IN_TOASCII:
8301 return fold_builtin_toascii (loc, arg0);
8303 case BUILT_IN_ISDIGIT:
8304 return fold_builtin_isdigit (loc, arg0);
8306 CASE_FLT_FN (BUILT_IN_FINITE):
8307 case BUILT_IN_FINITED32:
8308 case BUILT_IN_FINITED64:
8309 case BUILT_IN_FINITED128:
8310 case BUILT_IN_ISFINITE:
8312 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8313 if (ret)
8314 return ret;
8315 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8318 CASE_FLT_FN (BUILT_IN_ISINF):
8319 case BUILT_IN_ISINFD32:
8320 case BUILT_IN_ISINFD64:
8321 case BUILT_IN_ISINFD128:
8323 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8324 if (ret)
8325 return ret;
8326 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8329 case BUILT_IN_ISNORMAL:
8330 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8332 case BUILT_IN_ISINF_SIGN:
8333 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8335 CASE_FLT_FN (BUILT_IN_ISNAN):
8336 case BUILT_IN_ISNAND32:
8337 case BUILT_IN_ISNAND64:
8338 case BUILT_IN_ISNAND128:
8339 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8341 case BUILT_IN_FREE:
8342 if (integer_zerop (arg0))
8343 return build_empty_stmt (loc);
8344 break;
8346 default:
8347 break;
8350 return NULL_TREE;
8354 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8355 This function returns NULL_TREE if no simplification was possible. */
8357 static tree
8358 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8360 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8361 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8363 if (TREE_CODE (arg0) == ERROR_MARK
8364 || TREE_CODE (arg1) == ERROR_MARK)
8365 return NULL_TREE;
8367 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8368 return ret;
8370 switch (fcode)
8372 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8373 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8374 if (validate_arg (arg0, REAL_TYPE)
8375 && validate_arg (arg1, POINTER_TYPE))
8376 return do_mpfr_lgamma_r (arg0, arg1, type);
8377 break;
8379 CASE_FLT_FN (BUILT_IN_FREXP):
8380 return fold_builtin_frexp (loc, arg0, arg1, type);
8382 CASE_FLT_FN (BUILT_IN_MODF):
8383 return fold_builtin_modf (loc, arg0, arg1, type);
8385 case BUILT_IN_STRSTR:
8386 return fold_builtin_strstr (loc, arg0, arg1, type);
8388 case BUILT_IN_STRSPN:
8389 return fold_builtin_strspn (loc, arg0, arg1);
8391 case BUILT_IN_STRCSPN:
8392 return fold_builtin_strcspn (loc, arg0, arg1);
8394 case BUILT_IN_STRCMP:
8395 return fold_builtin_strcmp (loc, arg0, arg1);
8397 case BUILT_IN_STRPBRK:
8398 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8400 case BUILT_IN_EXPECT:
8401 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8403 case BUILT_IN_ISGREATER:
8404 return fold_builtin_unordered_cmp (loc, fndecl,
8405 arg0, arg1, UNLE_EXPR, LE_EXPR);
8406 case BUILT_IN_ISGREATEREQUAL:
8407 return fold_builtin_unordered_cmp (loc, fndecl,
8408 arg0, arg1, UNLT_EXPR, LT_EXPR);
8409 case BUILT_IN_ISLESS:
8410 return fold_builtin_unordered_cmp (loc, fndecl,
8411 arg0, arg1, UNGE_EXPR, GE_EXPR);
8412 case BUILT_IN_ISLESSEQUAL:
8413 return fold_builtin_unordered_cmp (loc, fndecl,
8414 arg0, arg1, UNGT_EXPR, GT_EXPR);
8415 case BUILT_IN_ISLESSGREATER:
8416 return fold_builtin_unordered_cmp (loc, fndecl,
8417 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8418 case BUILT_IN_ISUNORDERED:
8419 return fold_builtin_unordered_cmp (loc, fndecl,
8420 arg0, arg1, UNORDERED_EXPR,
8421 NOP_EXPR);
8423 /* We do the folding for va_start in the expander. */
8424 case BUILT_IN_VA_START:
8425 break;
8427 case BUILT_IN_OBJECT_SIZE:
8428 return fold_builtin_object_size (arg0, arg1);
8430 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8431 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8433 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8434 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8436 default:
8437 break;
8439 return NULL_TREE;
8442 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8443 and ARG2.
8444 This function returns NULL_TREE if no simplification was possible. */
8446 static tree
8447 fold_builtin_3 (location_t loc, tree fndecl,
8448 tree arg0, tree arg1, tree arg2)
8450 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8451 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8453 if (TREE_CODE (arg0) == ERROR_MARK
8454 || TREE_CODE (arg1) == ERROR_MARK
8455 || TREE_CODE (arg2) == ERROR_MARK)
8456 return NULL_TREE;
8458 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8459 arg0, arg1, arg2))
8460 return ret;
8462 switch (fcode)
8465 CASE_FLT_FN (BUILT_IN_SINCOS):
8466 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8468 CASE_FLT_FN (BUILT_IN_FMA):
8469 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8471 CASE_FLT_FN (BUILT_IN_REMQUO):
8472 if (validate_arg (arg0, REAL_TYPE)
8473 && validate_arg (arg1, REAL_TYPE)
8474 && validate_arg (arg2, POINTER_TYPE))
8475 return do_mpfr_remquo (arg0, arg1, arg2);
8476 break;
8478 case BUILT_IN_STRNCMP:
8479 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8481 case BUILT_IN_MEMCHR:
8482 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8484 case BUILT_IN_BCMP:
8485 case BUILT_IN_MEMCMP:
8486 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8488 case BUILT_IN_EXPECT:
8489 return fold_builtin_expect (loc, arg0, arg1, arg2);
8491 case BUILT_IN_ADD_OVERFLOW:
8492 case BUILT_IN_SUB_OVERFLOW:
8493 case BUILT_IN_MUL_OVERFLOW:
8494 case BUILT_IN_ADD_OVERFLOW_P:
8495 case BUILT_IN_SUB_OVERFLOW_P:
8496 case BUILT_IN_MUL_OVERFLOW_P:
8497 case BUILT_IN_SADD_OVERFLOW:
8498 case BUILT_IN_SADDL_OVERFLOW:
8499 case BUILT_IN_SADDLL_OVERFLOW:
8500 case BUILT_IN_SSUB_OVERFLOW:
8501 case BUILT_IN_SSUBL_OVERFLOW:
8502 case BUILT_IN_SSUBLL_OVERFLOW:
8503 case BUILT_IN_SMUL_OVERFLOW:
8504 case BUILT_IN_SMULL_OVERFLOW:
8505 case BUILT_IN_SMULLL_OVERFLOW:
8506 case BUILT_IN_UADD_OVERFLOW:
8507 case BUILT_IN_UADDL_OVERFLOW:
8508 case BUILT_IN_UADDLL_OVERFLOW:
8509 case BUILT_IN_USUB_OVERFLOW:
8510 case BUILT_IN_USUBL_OVERFLOW:
8511 case BUILT_IN_USUBLL_OVERFLOW:
8512 case BUILT_IN_UMUL_OVERFLOW:
8513 case BUILT_IN_UMULL_OVERFLOW:
8514 case BUILT_IN_UMULLL_OVERFLOW:
8515 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8517 default:
8518 break;
8520 return NULL_TREE;
8523 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
8524 arguments. IGNORE is true if the result of the
8525 function call is ignored. This function returns NULL_TREE if no
8526 simplification was possible. */
8528 tree
8529 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8531 tree ret = NULL_TREE;
8533 switch (nargs)
8535 case 0:
8536 ret = fold_builtin_0 (loc, fndecl);
8537 break;
8538 case 1:
8539 ret = fold_builtin_1 (loc, fndecl, args[0]);
8540 break;
8541 case 2:
8542 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8543 break;
8544 case 3:
8545 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8546 break;
8547 default:
8548 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8549 break;
8551 if (ret)
8553 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8554 SET_EXPR_LOCATION (ret, loc);
8555 TREE_NO_WARNING (ret) = 1;
8556 return ret;
8558 return NULL_TREE;
8561 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8562 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8563 of arguments in ARGS to be omitted. OLDNARGS is the number of
8564 elements in ARGS. */
8566 static tree
8567 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8568 int skip, tree fndecl, int n, va_list newargs)
8570 int nargs = oldnargs - skip + n;
8571 tree *buffer;
8573 if (n > 0)
8575 int i, j;
8577 buffer = XALLOCAVEC (tree, nargs);
8578 for (i = 0; i < n; i++)
8579 buffer[i] = va_arg (newargs, tree);
8580 for (j = skip; j < oldnargs; j++, i++)
8581 buffer[i] = args[j];
8583 else
8584 buffer = args + skip;
8586 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8589 /* Return true if FNDECL shouldn't be folded right now.
8590 If a built-in function has an always_inline inline wrapper,
8591 defer folding it until after always_inline functions have
8592 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8593 might not be performed. */
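/* For instance, glibc's fortified headers (cited only as an illustration)
   define functions such as strcpy as extern always_inline wrappers around
   __builtin___strcpy_chk; folding the call as a plain builtin before the
   wrapper is inlined would lose the object-size check.  */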
8595 bool
8596 avoid_folding_inline_builtin (tree fndecl)
8598 return (DECL_DECLARED_INLINE_P (fndecl)
8599 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8600 && cfun
8601 && !cfun->always_inline_functions_inlined
8602 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8605 /* A wrapper function for builtin folding that prevents warnings for
8606 "statement without effect" and the like, caused by removing the
8607 call node earlier than the warning is generated. */
8609 tree
8610 fold_call_expr (location_t loc, tree exp, bool ignore)
8612 tree ret = NULL_TREE;
8613 tree fndecl = get_callee_fndecl (exp);
8614 if (fndecl
8615 && TREE_CODE (fndecl) == FUNCTION_DECL
8616 && DECL_BUILT_IN (fndecl)
8617 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8618 yet. Defer folding until we see all the arguments
8619 (after inlining). */
8620 && !CALL_EXPR_VA_ARG_PACK (exp))
8622 int nargs = call_expr_nargs (exp);
8624 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8625 instead the last argument is __builtin_va_arg_pack ().  Defer folding
8626 even in that case, until arguments are finalized. */
8627 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8629 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8630 if (fndecl2
8631 && TREE_CODE (fndecl2) == FUNCTION_DECL
8632 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8633 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8634 return NULL_TREE;
8637 if (avoid_folding_inline_builtin (fndecl))
8638 return NULL_TREE;
8640 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8641 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8642 CALL_EXPR_ARGP (exp), ignore);
8643 else
8645 tree *args = CALL_EXPR_ARGP (exp);
8646 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8647 if (ret)
8648 return ret;
8651 return NULL_TREE;
8654 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8655 N arguments are passed in the array ARGARRAY. Return a folded
8656 expression or NULL_TREE if no simplification was possible. */
8658 tree
8659 fold_builtin_call_array (location_t loc, tree,
8660 tree fn,
8661 int n,
8662 tree *argarray)
8664 if (TREE_CODE (fn) != ADDR_EXPR)
8665 return NULL_TREE;
8667 tree fndecl = TREE_OPERAND (fn, 0);
8668 if (TREE_CODE (fndecl) == FUNCTION_DECL
8669 && DECL_BUILT_IN (fndecl))
8671 /* If last argument is __builtin_va_arg_pack (), arguments to this
8672 function are not finalized yet. Defer folding until they are. */
8673 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8675 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8676 if (fndecl2
8677 && TREE_CODE (fndecl2) == FUNCTION_DECL
8678 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8679 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8680 return NULL_TREE;
8682 if (avoid_folding_inline_builtin (fndecl))
8683 return NULL_TREE;
8684 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8685 return targetm.fold_builtin (fndecl, n, argarray, false);
8686 else
8687 return fold_builtin_n (loc, fndecl, argarray, n, false);
8690 return NULL_TREE;
8693 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8694 along with N new arguments specified as the "..." parameters. SKIP
8695 is the number of arguments in EXP to be omitted. This function is used
8696 to do varargs-to-varargs transformations. */
8698 static tree
8699 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8701 va_list ap;
8702 tree t;
8704 va_start (ap, n);
8705 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8706 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8707 va_end (ap);
8709 return t;
8712 /* Validate a single argument ARG against a tree code CODE representing
8713 a type. */
8715 static bool
8716 validate_arg (const_tree arg, enum tree_code code)
8718 if (!arg)
8719 return false;
8720 else if (code == POINTER_TYPE)
8721 return POINTER_TYPE_P (TREE_TYPE (arg));
8722 else if (code == INTEGER_TYPE)
8723 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8724 return code == TREE_CODE (TREE_TYPE (arg));
8727 /* This function validates the types of a function call argument list
8728 against a specified list of tree_codes. If the last specifier is a 0,
8729 that represents an ellipsis, otherwise the last specifier must be a
8730 VOID_TYPE.
8732 This is the GIMPLE version of validate_arglist. Eventually we want to
8733 completely convert builtins.c to work from GIMPLEs and the tree based
8734 validate_arglist will then be removed. */
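/* For example, validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly one floating-point argument followed by one
   pointer argument; ending the list with 0 instead of VOID_TYPE would also
   allow trailing arguments of any type.  */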
8736 bool
8737 validate_gimple_arglist (const gcall *call, ...)
8739 enum tree_code code;
8740 bool res = 0;
8741 va_list ap;
8742 const_tree arg;
8743 size_t i;
8745 va_start (ap, call);
8746 i = 0;
8750 code = (enum tree_code) va_arg (ap, int);
8751 switch (code)
8753 case 0:
8754 /* This signifies an ellipsis; any further arguments are all ok. */
8755 res = true;
8756 goto end;
8757 case VOID_TYPE:
8758 /* This signifies an endlink, if no arguments remain, return
8759 true, otherwise return false. */
8760 res = (i == gimple_call_num_args (call));
8761 goto end;
8762 default:
8763 /* If no parameters remain or the parameter's code does not
8764 match the specified code, return false. Otherwise continue
8765 checking any remaining arguments. */
8766 arg = gimple_call_arg (call, i++);
8767 if (!validate_arg (arg, code))
8768 goto end;
8769 break;
8772 while (1);
8774 /* We need gotos here since we can only have one VA_CLOSE in a
8775 function. */
8776 end: ;
8777 va_end (ap);
8779 return res;
8782 /* Default target-specific builtin expander that does nothing. */
8784 rtx
8785 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8786 rtx target ATTRIBUTE_UNUSED,
8787 rtx subtarget ATTRIBUTE_UNUSED,
8788 machine_mode mode ATTRIBUTE_UNUSED,
8789 int ignore ATTRIBUTE_UNUSED)
8791 return NULL_RTX;
8794 /* Returns true if EXP represents data that would potentially reside
8795 in a readonly section. */
8797 bool
8798 readonly_data_expr (tree exp)
8800 STRIP_NOPS (exp);
8802 if (TREE_CODE (exp) != ADDR_EXPR)
8803 return false;
8805 exp = get_base_address (TREE_OPERAND (exp, 0));
8806 if (!exp)
8807 return false;
8809 /* Make sure we call decl_readonly_section only for trees it
8810 can handle (since it returns true for everything it doesn't
8811 understand). */
8812 if (TREE_CODE (exp) == STRING_CST
8813 || TREE_CODE (exp) == CONSTRUCTOR
8814 || (VAR_P (exp) && TREE_STATIC (exp)))
8815 return decl_readonly_section (exp, 0);
8816 else
8817 return false;
8820 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8821 to the call, and TYPE is its return type.
8823 Return NULL_TREE if no simplification was possible, otherwise return the
8824 simplified form of the call as a tree.
8826 The simplified form may be a constant or other expression which
8827 computes the same value, but in a more efficient manner (including
8828 calls to other builtin functions).
8830 The call may contain arguments which need to be evaluated, but
8831 which are not useful to determine the result of the call. In
8832 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8833 COMPOUND_EXPR will be an argument which must be evaluated.
8834 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8835 COMPOUND_EXPR in the chain will contain the tree for the simplified
8836 form of the builtin function call. */
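/* For example, strstr (s, "") folds to (char *) s, strstr (s, "a") is
   rewritten as strchr (s, 'a'), and a call with both arguments constant
   folds to an offset into the first string (or a null pointer).  */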
8838 static tree
8839 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8841 if (!validate_arg (s1, POINTER_TYPE)
8842 || !validate_arg (s2, POINTER_TYPE))
8843 return NULL_TREE;
8844 else
8846 tree fn;
8847 const char *p1, *p2;
8849 p2 = c_getstr (s2);
8850 if (p2 == NULL)
8851 return NULL_TREE;
8853 p1 = c_getstr (s1);
8854 if (p1 != NULL)
8856 const char *r = strstr (p1, p2);
8857 tree tem;
8859 if (r == NULL)
8860 return build_int_cst (TREE_TYPE (s1), 0);
8862 /* Return an offset into the constant string argument. */
8863 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8864 return fold_convert_loc (loc, type, tem);
8867 /* The argument is const char *, and the result is char *, so we need
8868 a type conversion here to avoid a warning. */
8869 if (p2[0] == '\0')
8870 return fold_convert_loc (loc, type, s1);
8872 if (p2[1] != '\0')
8873 return NULL_TREE;
8875 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8876 if (!fn)
8877 return NULL_TREE;
8879 /* New argument list transforming strstr(s1, s2) to
8880 strchr(s1, s2[0]). */
8881 return build_call_expr_loc (loc, fn, 2, s1,
8882 build_int_cst (integer_type_node, p2[0]));
8886 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8887 to the call, and TYPE is its return type.
8889 Return NULL_TREE if no simplification was possible, otherwise return the
8890 simplified form of the call as a tree.
8892 The simplified form may be a constant or other expression which
8893 computes the same value, but in a more efficient manner (including
8894 calls to other builtin functions).
8896 The call may contain arguments which need to be evaluated, but
8897 which are not useful to determine the result of the call. In
8898 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8899 COMPOUND_EXPR will be an argument which must be evaluated.
8900 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8901 COMPOUND_EXPR in the chain will contain the tree for the simplified
8902 form of the builtin function call. */
8904 static tree
8905 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8907 if (!validate_arg (s1, POINTER_TYPE)
8908 || !validate_arg (s2, POINTER_TYPE))
8909 return NULL_TREE;
8910 else
8912 tree fn;
8913 const char *p1, *p2;
8915 p2 = c_getstr (s2);
8916 if (p2 == NULL)
8917 return NULL_TREE;
8919 p1 = c_getstr (s1);
8920 if (p1 != NULL)
8922 const char *r = strpbrk (p1, p2);
8923 tree tem;
8925 if (r == NULL)
8926 return build_int_cst (TREE_TYPE (s1), 0);
8928 /* Return an offset into the constant string argument. */
8929 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8930 return fold_convert_loc (loc, type, tem);
8933 if (p2[0] == '\0')
8934 /* strpbrk(x, "") == NULL.
8935 Evaluate and ignore s1 in case it had side-effects. */
8936 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8938 if (p2[1] != '\0')
8939 return NULL_TREE; /* Really call strpbrk. */
8941 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8942 if (!fn)
8943 return NULL_TREE;
8945 /* New argument list transforming strpbrk(s1, s2) to
8946 strchr(s1, s2[0]). */
8947 return build_call_expr_loc (loc, fn, 2, s1,
8948 build_int_cst (integer_type_node, p2[0]));
8952 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8953 to the call.
8955 Return NULL_TREE if no simplification was possible, otherwise return the
8956 simplified form of the call as a tree.
8958 The simplified form may be a constant or other expression which
8959 computes the same value, but in a more efficient manner (including
8960 calls to other builtin functions).
8962 The call may contain arguments which need to be evaluated, but
8963 which are not useful to determine the result of the call. In
8964 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8965 COMPOUND_EXPR will be an argument which must be evaluated.
8966 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8967 COMPOUND_EXPR in the chain will contain the tree for the simplified
8968 form of the builtin function call. */
8970 static tree
8971 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8973 if (!validate_arg (s1, POINTER_TYPE)
8974 || !validate_arg (s2, POINTER_TYPE))
8975 return NULL_TREE;
8976 else
8978 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8980 /* If either argument is "", return NULL_TREE. */
8981 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8982 /* Evaluate and ignore both arguments in case either one has
8983 side-effects. */
8984 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8985 s1, s2);
8986 return NULL_TREE;
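/* Editorial sketch (not part of builtins.c) of the strspn rule above:
   either argument being "" makes the result a constant 0, with the other
   argument still evaluated for side effects.  */
#include <assert.h>
#include <string.h>

static void
strspn_empty_demo (const char *s)
{
  assert (strspn (s, "") == 0);
  assert (strspn ("", s) == 0);
}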
8990 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8991 to the call.
8993 Return NULL_TREE if no simplification was possible, otherwise return the
8994 simplified form of the call as a tree.
8996 The simplified form may be a constant or other expression which
8997 computes the same value, but in a more efficient manner (including
8998 calls to other builtin functions).
9000 The call may contain arguments which need to be evaluated, but
9001 which are not useful to determine the result of the call. In
9002 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9003 COMPOUND_EXPR will be an argument which must be evaluated.
9004 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9005 COMPOUND_EXPR in the chain will contain the tree for the simplified
9006 form of the builtin function call. */
9008 static tree
9009 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9011 if (!validate_arg (s1, POINTER_TYPE)
9012 || !validate_arg (s2, POINTER_TYPE))
9013 return NULL_TREE;
9014 else
9016 /* If the first argument is "", return NULL_TREE. */
9017 const char *p1 = c_getstr (s1);
9018 if (p1 && *p1 == '\0')
9020 /* Evaluate and ignore argument s2 in case it has
9021 side-effects. */
9022 return omit_one_operand_loc (loc, size_type_node,
9023 size_zero_node, s2);
9026 /* If the second argument is "", return __builtin_strlen(s1). */
9027 const char *p2 = c_getstr (s2);
9028 if (p2 && *p2 == '\0')
9030 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9032 /* If the replacement _DECL isn't initialized, don't do the
9033 transformation. */
9034 if (!fn)
9035 return NULL_TREE;
9037 return build_call_expr_loc (loc, fn, 1, s1);
9039 return NULL_TREE;
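/* Editorial sketch (not part of builtins.c) of the strcspn folds above:
   an empty first argument gives 0, and an empty reject set reduces to
   strlen of the first argument.  */
#include <assert.h>
#include <string.h>

static void
strcspn_demo (const char *s)
{
  assert (strcspn ("", s) == 0);
  assert (strcspn (s, "") == strlen (s));
}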
9043 /* Fold the next_arg or va_start call EXP. Return true if an error was
9044 produced, false otherwise. This is done so that we don't output the error
9045 or warning twice or three times. */
9047 bool
9048 fold_builtin_next_arg (tree exp, bool va_start_p)
9050 tree fntype = TREE_TYPE (current_function_decl);
9051 int nargs = call_expr_nargs (exp);
9052 tree arg;
9053 /* There is a good chance the current input_location points inside the
9054 definition of the va_start macro (perhaps on the token for
9055 builtin) in a system header, so warnings will not be emitted.
9056 Use the location in real source code. */
9057 source_location current_location =
9058 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9059 NULL);
9061 if (!stdarg_p (fntype))
9063 error ("%<va_start%> used in function with fixed args");
9064 return true;
9067 if (va_start_p)
9069 if (va_start_p && (nargs != 2))
9071 error ("wrong number of arguments to function %<va_start%>");
9072 return true;
9074 arg = CALL_EXPR_ARG (exp, 1);
9076 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9077 when we checked the arguments and if needed issued a warning. */
9078 else
9080 if (nargs == 0)
9082 /* Evidently an out of date version of <stdarg.h>; can't validate
9083 va_start's second argument, but can still work as intended. */
9084 warning_at (current_location,
9085 OPT_Wvarargs,
9086 "%<__builtin_next_arg%> called without an argument");
9087 return true;
9089 else if (nargs > 1)
9091 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9092 return true;
9094 arg = CALL_EXPR_ARG (exp, 0);
9097 if (TREE_CODE (arg) == SSA_NAME)
9098 arg = SSA_NAME_VAR (arg);
9100 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9101 or __builtin_next_arg (0) the first time we see it, after checking
9102 the arguments and if needed issuing a warning. */
9103 if (!integer_zerop (arg))
9105 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9107 /* Strip off all nops for the sake of the comparison. This
9108 is not quite the same as STRIP_NOPS. It does more.
9109 We must also strip off INDIRECT_EXPR for C++ reference
9110 parameters. */
9111 while (CONVERT_EXPR_P (arg)
9112 || TREE_CODE (arg) == INDIRECT_REF)
9113 arg = TREE_OPERAND (arg, 0);
9114 if (arg != last_parm)
9116 /* FIXME: Sometimes with the tree optimizers we can get
9117 something other than the last argument even though the user used the last
9118 argument. We just warn and set the arg to be the last
9119 argument so that we will get wrong-code because of
9120 it. */
9121 warning_at (current_location,
9122 OPT_Wvarargs,
9123 "second parameter of %<va_start%> not last named argument");
9126 /* Undefined by C99 7.15.1.4p4 (va_start):
9127 "If the parameter parmN is declared with the register storage
9128 class, with a function or array type, or with a type that is
9129 not compatible with the type that results after application of
9130 the default argument promotions, the behavior is undefined."
9132 else if (DECL_REGISTER (arg))
9134 warning_at (current_location,
9135 OPT_Wvarargs,
9136 "undefined behavior when second parameter of "
9137 "%<va_start%> is declared with %<register%> storage");
9140 /* We want to verify the second parameter just once before the tree
9141 optimizers are run and then avoid keeping it in the tree,
9142 as otherwise we could warn even for correct code like:
9143 void foo (int i, ...)
9144 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9145 if (va_start_p)
9146 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9147 else
9148 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9150 return false;
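/* Editorial sketch (not part of builtins.c): the kind of user code the
   checks above diagnose.  Names are invented for illustration.  */
#include <stdarg.h>

static int
sum_ints (int first, int count, ...)
{
  va_list ap;
  int i, total = 0;
  /* Passing FIRST instead of the last named parameter COUNT draws
     "second parameter of 'va_start' not last named argument" (-Wvarargs).  */
  va_start (ap, first);
  for (i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}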
9154 /* Expand a call EXP to __builtin_object_size. */
9156 static rtx
9157 expand_builtin_object_size (tree exp)
9159 tree ost;
9160 int object_size_type;
9161 tree fndecl = get_callee_fndecl (exp);
9163 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9165 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9166 exp, fndecl);
9167 expand_builtin_trap ();
9168 return const0_rtx;
9171 ost = CALL_EXPR_ARG (exp, 1);
9172 STRIP_NOPS (ost);
9174 if (TREE_CODE (ost) != INTEGER_CST
9175 || tree_int_cst_sgn (ost) < 0
9176 || compare_tree_int (ost, 3) > 0)
9178 error ("%Klast argument of %D is not integer constant between 0 and 3",
9179 exp, fndecl);
9180 expand_builtin_trap ();
9181 return const0_rtx;
9184 object_size_type = tree_to_shwi (ost);
9186 return object_size_type < 2 ? constm1_rtx : const0_rtx;
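/* Editorial sketch (not part of builtins.c): how the expansion above
   behaves once no object size could be computed earlier.  Names are
   invented for illustration.  */
#include <stddef.h>

static char buffer[64];

static size_t
object_size_demo (char *p)
{
  /* For a known object this folds to 64 long before expansion; for an
     unknown pointer the fallback above yields (size_t) -1 for types 0/1
     and 0 for types 2/3.  */
  return __builtin_object_size (buffer, 0)
	 + __builtin_object_size (p, 0)
	 + __builtin_object_size (p, 2);
}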
9189 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9190 FCODE is the BUILT_IN_* to use.
9191 Return NULL_RTX if we failed; the caller should emit a normal call,
9192 otherwise try to get the result in TARGET, if convenient (and in
9193 mode MODE if that's convenient). */
9195 static rtx
9196 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9197 enum built_in_function fcode)
9199 tree dest, src, len, size;
9201 if (!validate_arglist (exp,
9202 POINTER_TYPE,
9203 fcode == BUILT_IN_MEMSET_CHK
9204 ? INTEGER_TYPE : POINTER_TYPE,
9205 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9206 return NULL_RTX;
9208 dest = CALL_EXPR_ARG (exp, 0);
9209 src = CALL_EXPR_ARG (exp, 1);
9210 len = CALL_EXPR_ARG (exp, 2);
9211 size = CALL_EXPR_ARG (exp, 3);
9213 if (! tree_fits_uhwi_p (size))
9214 return NULL_RTX;
9216 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9218 tree fn;
9220 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9222 warning_at (tree_nonartificial_location (exp),
9223 0, "%Kcall to %D will always overflow destination buffer",
9224 exp, get_callee_fndecl (exp));
9225 return NULL_RTX;
9228 fn = NULL_TREE;
9229 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9230 mem{cpy,pcpy,move,set} is available. */
9231 switch (fcode)
9233 case BUILT_IN_MEMCPY_CHK:
9234 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9235 break;
9236 case BUILT_IN_MEMPCPY_CHK:
9237 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9238 break;
9239 case BUILT_IN_MEMMOVE_CHK:
9240 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9241 break;
9242 case BUILT_IN_MEMSET_CHK:
9243 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9244 break;
9245 default:
9246 break;
9249 if (! fn)
9250 return NULL_RTX;
9252 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9253 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9254 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9255 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9257 else if (fcode == BUILT_IN_MEMSET_CHK)
9258 return NULL_RTX;
9259 else
9261 unsigned int dest_align = get_pointer_alignment (dest);
9263 /* If DEST is not a pointer type, call the normal function. */
9264 if (dest_align == 0)
9265 return NULL_RTX;
9267 /* If SRC and DEST are the same (and not volatile), do nothing. */
9268 if (operand_equal_p (src, dest, 0))
9270 tree expr;
9272 if (fcode != BUILT_IN_MEMPCPY_CHK)
9274 /* Evaluate and ignore LEN in case it has side-effects. */
9275 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9276 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9279 expr = fold_build_pointer_plus (dest, len);
9280 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9283 /* __memmove_chk special case. */
9284 if (fcode == BUILT_IN_MEMMOVE_CHK)
9286 unsigned int src_align = get_pointer_alignment (src);
9288 if (src_align == 0)
9289 return NULL_RTX;
9291 /* If src is categorized for a readonly section we can use
9292 normal __memcpy_chk. */
9293 if (readonly_data_expr (src))
9295 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9296 if (!fn)
9297 return NULL_RTX;
9298 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9299 dest, src, len, size);
9300 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9301 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9302 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9305 return NULL_RTX;
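/* Editorial sketch (not part of builtins.c) of the checked calls handled
   above, as emitted for _FORTIFY_SOURCE-style hardening.  The function
   name is invented for illustration.  */
#include <string.h>

static void
copy_header (char *dst, const char *src)
{
  /* When the constant length 16 is known to fit the destination, the call
     is expanded as a plain memcpy; if it can never fit, the "will always
     overflow destination buffer" warning is issued and the checked call is
     kept so it can trap at run time.  */
  __builtin___memcpy_chk (dst, src, 16, __builtin_object_size (dst, 0));
}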
9309 /* Emit warning if a buffer overflow is detected at compile time. */
9311 static void
9312 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9314 int is_strlen = 0;
9315 tree len, size;
9316 location_t loc = tree_nonartificial_location (exp);
9318 switch (fcode)
9320 case BUILT_IN_STRCPY_CHK:
9321 case BUILT_IN_STPCPY_CHK:
9322 /* For __strcat_chk the warning will be emitted only if overflowing
9323 by at least strlen (dest) + 1 bytes. */
9324 case BUILT_IN_STRCAT_CHK:
9325 len = CALL_EXPR_ARG (exp, 1);
9326 size = CALL_EXPR_ARG (exp, 2);
9327 is_strlen = 1;
9328 break;
9329 case BUILT_IN_STRNCAT_CHK:
9330 case BUILT_IN_STRNCPY_CHK:
9331 case BUILT_IN_STPNCPY_CHK:
9332 len = CALL_EXPR_ARG (exp, 2);
9333 size = CALL_EXPR_ARG (exp, 3);
9334 break;
9335 case BUILT_IN_SNPRINTF_CHK:
9336 case BUILT_IN_VSNPRINTF_CHK:
9337 len = CALL_EXPR_ARG (exp, 1);
9338 size = CALL_EXPR_ARG (exp, 3);
9339 break;
9340 default:
9341 gcc_unreachable ();
9344 if (!len || !size)
9345 return;
9347 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9348 return;
9350 if (is_strlen)
9352 len = c_strlen (len, 1);
9353 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9354 return;
9356 else if (fcode == BUILT_IN_STRNCAT_CHK)
9358 tree src = CALL_EXPR_ARG (exp, 1);
9359 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9360 return;
9361 src = c_strlen (src, 1);
9362 if (! src || ! tree_fits_uhwi_p (src))
9364 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9365 exp, get_callee_fndecl (exp));
9366 return;
9368 else if (tree_int_cst_lt (src, size))
9369 return;
9371 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9372 return;
9374 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9375 exp, get_callee_fndecl (exp));
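/* Editorial sketch (not part of builtins.c): source that triggers the
   warning above when built with optimization and _FORTIFY_SOURCE.  */
#include <string.h>

static void
overflow_demo (void)
{
  char buf[8];
  /* The literal needs 16 bytes but BUF holds 8:
     "call to __builtin___strcpy_chk will always overflow destination
     buffer".  */
  strcpy (buf, "fifteen chars!!");
}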
9378 /* Emit warning if a buffer overflow is detected at compile time
9379 in __sprintf_chk/__vsprintf_chk calls. */
9381 static void
9382 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9384 tree size, len, fmt;
9385 const char *fmt_str;
9386 int nargs = call_expr_nargs (exp);
9388 /* Verify the required arguments in the original call. */
9390 if (nargs < 4)
9391 return;
9392 size = CALL_EXPR_ARG (exp, 2);
9393 fmt = CALL_EXPR_ARG (exp, 3);
9395 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9396 return;
9398 /* Check whether the format is a literal string constant. */
9399 fmt_str = c_getstr (fmt);
9400 if (fmt_str == NULL)
9401 return;
9403 if (!init_target_chars ())
9404 return;
9406 /* If the format doesn't contain % args or %%, we know its size. */
9407 if (strchr (fmt_str, target_percent) == 0)
9408 len = build_int_cstu (size_type_node, strlen (fmt_str));
9409 /* If the format is "%s" and first ... argument is a string literal,
9410 we know it too. */
9411 else if (fcode == BUILT_IN_SPRINTF_CHK
9412 && strcmp (fmt_str, target_percent_s) == 0)
9414 tree arg;
9416 if (nargs < 5)
9417 return;
9418 arg = CALL_EXPR_ARG (exp, 4);
9419 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9420 return;
9422 len = c_strlen (arg, 1);
9423 if (!len || ! tree_fits_uhwi_p (len))
9424 return;
9426 else
9427 return;
9429 if (! tree_int_cst_lt (len, size))
9430 warning_at (tree_nonartificial_location (exp),
9431 0, "%Kcall to %D will always overflow destination buffer",
9432 exp, get_callee_fndecl (exp));
9435 /* Emit warning if a free is called with address of a variable. */
9437 static void
9438 maybe_emit_free_warning (tree exp)
9440 tree arg = CALL_EXPR_ARG (exp, 0);
9442 STRIP_NOPS (arg);
9443 if (TREE_CODE (arg) != ADDR_EXPR)
9444 return;
9446 arg = get_base_address (TREE_OPERAND (arg, 0));
9447 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9448 return;
9450 if (SSA_VAR_P (arg))
9451 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9452 "%Kattempt to free a non-heap object %qD", exp, arg);
9453 else
9454 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9455 "%Kattempt to free a non-heap object", exp);
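/* Editorial sketch (not part of builtins.c): source that triggers
   -Wfree-nonheap-object through the check above.  */
#include <stdlib.h>

static void
free_nonheap_demo (void)
{
  int x = 0;
  /* "attempt to free a non-heap object 'x'".  */
  free (&x);
}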
9458 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9459 if possible. */
9461 static tree
9462 fold_builtin_object_size (tree ptr, tree ost)
9464 unsigned HOST_WIDE_INT bytes;
9465 int object_size_type;
9467 if (!validate_arg (ptr, POINTER_TYPE)
9468 || !validate_arg (ost, INTEGER_TYPE))
9469 return NULL_TREE;
9471 STRIP_NOPS (ost);
9473 if (TREE_CODE (ost) != INTEGER_CST
9474 || tree_int_cst_sgn (ost) < 0
9475 || compare_tree_int (ost, 3) > 0)
9476 return NULL_TREE;
9478 object_size_type = tree_to_shwi (ost);
9480 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9481 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9482 and (size_t) 0 for types 2 and 3. */
9483 if (TREE_SIDE_EFFECTS (ptr))
9484 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9486 if (TREE_CODE (ptr) == ADDR_EXPR)
9488 compute_builtin_object_size (ptr, object_size_type, &bytes);
9489 if (wi::fits_to_tree_p (bytes, size_type_node))
9490 return build_int_cstu (size_type_node, bytes);
9492 else if (TREE_CODE (ptr) == SSA_NAME)
9494 /* If object size is not known yet, delay folding until
9495 later. Maybe subsequent passes will help determine
9496 it. */
9497 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9498 && wi::fits_to_tree_p (bytes, size_type_node))
9499 return build_int_cstu (size_type_node, bytes);
9502 return NULL_TREE;
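/* Editorial sketch (not part of builtins.c) of the side-effect rule
   above: __builtin_object_size never evaluates its first argument, so a
   call with side effects folds directly to the "unknown" result.  The
   helper below is invented for illustration.  */
#include <stddef.h>

extern char *next_buffer (void);

static size_t
unevaluated_demo (void)
{
  /* next_buffer () is not called; the result is (size_t) -1 for type 0.  */
  return __builtin_object_size (next_buffer (), 0);
}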
9505 /* Builtins with folding operations that operate on "..." arguments
9506 need special handling; we need to store the arguments in a convenient
9507 data structure before attempting any folding. Fortunately there are
9508 only a few builtins that fall into this category. FNDECL is the
9509 function, EXP is the CALL_EXPR for the call. */
9511 static tree
9512 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9514 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9515 tree ret = NULL_TREE;
9517 switch (fcode)
9519 case BUILT_IN_FPCLASSIFY:
9520 ret = fold_builtin_fpclassify (loc, args, nargs);
9521 break;
9523 default:
9524 break;
9526 if (ret)
9528 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9529 SET_EXPR_LOCATION (ret, loc);
9530 TREE_NO_WARNING (ret) = 1;
9531 return ret;
9533 return NULL_TREE;
9536 /* Initialize format string characters in the target charset. */
9538 bool
9539 init_target_chars (void)
9541 static bool init;
9542 if (!init)
9544 target_newline = lang_hooks.to_target_charset ('\n');
9545 target_percent = lang_hooks.to_target_charset ('%');
9546 target_c = lang_hooks.to_target_charset ('c');
9547 target_s = lang_hooks.to_target_charset ('s');
9548 if (target_newline == 0 || target_percent == 0 || target_c == 0
9549 || target_s == 0)
9550 return false;
9552 target_percent_c[0] = target_percent;
9553 target_percent_c[1] = target_c;
9554 target_percent_c[2] = '\0';
9556 target_percent_s[0] = target_percent;
9557 target_percent_s[1] = target_s;
9558 target_percent_s[2] = '\0';
9560 target_percent_s_newline[0] = target_percent;
9561 target_percent_s_newline[1] = target_s;
9562 target_percent_s_newline[2] = target_newline;
9563 target_percent_s_newline[3] = '\0';
9565 init = true;
9567 return true;
9570 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9571 and no overflow/underflow occurred. INEXACT is true if M was not
9572 exactly calculated. TYPE is the tree type for the result. This
9573 function assumes that the caller cleared the MPFR flags and then
9574 calculated M, so that any flag set before entering this function
9575 reflects that calculation. Return NULL_TREE if any checks fail. */
9577 static tree
9578 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
9580 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9581 overflow/underflow occurred. If -frounding-math, proceed iff the
9582 result of calling FUNC was exact. */
9583 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
9584 && (!flag_rounding_math || !inexact))
9586 REAL_VALUE_TYPE rr;
9588 real_from_mpfr (&rr, m, type, GMP_RNDN);
9589 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9590 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9591 but the mpfr_t is not, then we underflowed in the
9592 conversion. */
9593 if (real_isfinite (&rr)
9594 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9596 REAL_VALUE_TYPE rmode;
9598 real_convert (&rmode, TYPE_MODE (type), &rr);
9599 /* Proceed iff the specified mode can hold the value. */
9600 if (real_identical (&rmode, &rr))
9601 return build_real (type, rmode);
9604 return NULL_TREE;
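/* Editorial sketch (outside GCC) of the caller protocol do_mpfr_ckconv
   relies on: clear the MPFR flags, do the computation, and only then are
   the overflow/underflow flags and the inexact ternary meaningful.  */
#include <mpfr.h>

static int
mpfr_sin_checked (double x, double *out)
{
  mpfr_t m;
  int inexact, ok;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  inexact = mpfr_sin (m, m, GMP_RNDN);
  ok = mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ();
  /* With -frounding-math the fold above additionally requires !inexact.  */
  if (ok)
    *out = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  (void) inexact;
  return ok;
}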
9607 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9608 number and no overflow/underflow occurred. INEXACT is true if M
9609 was not exactly calculated. TYPE is the tree type for the result.
9610 This function assumes that the caller cleared the MPFR flags and then
9611 calculated M, so that any flag set before entering this function
9612 reflects that calculation. Return NULL_TREE if any checks fail; if
9613 FORCE_CONVERT is true, the checks are bypassed. */
9615 static tree
9616 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
9618 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9619 overflow/underflow occurred. If -frounding-math, proceed iff the
9620 result of calling FUNC was exact. */
9621 if (force_convert
9622 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9623 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9624 && (!flag_rounding_math || !inexact)))
9626 REAL_VALUE_TYPE re, im;
9628 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9629 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
9630 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9631 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9632 but the mpfr_t is not, then we underflowed in the
9633 conversion. */
9634 if (force_convert
9635 || (real_isfinite (&re) && real_isfinite (&im)
9636 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9637 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
9639 REAL_VALUE_TYPE re_mode, im_mode;
9641 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9642 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9643 /* Proceed iff the specified mode can hold the value. */
9644 if (force_convert
9645 || (real_identical (&re_mode, &re)
9646 && real_identical (&im_mode, &im)))
9647 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9648 build_real (TREE_TYPE (type), im_mode));
9651 return NULL_TREE;
9654 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
9655 the pointer *(ARG_QUO) and return the result. The type is taken
9656 from the type of ARG0 and is used for setting the precision of the
9657 calculation and results. */
9659 static tree
9660 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9662 tree const type = TREE_TYPE (arg0);
9663 tree result = NULL_TREE;
9665 STRIP_NOPS (arg0);
9666 STRIP_NOPS (arg1);
9668 /* To proceed, MPFR must exactly represent the target floating point
9669 format, which only happens when the target base equals two. */
9670 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9671 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9672 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9674 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9675 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9677 if (real_isfinite (ra0) && real_isfinite (ra1))
9679 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9680 const int prec = fmt->p;
9681 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9682 tree result_rem;
9683 long integer_quo;
9684 mpfr_t m0, m1;
9686 mpfr_inits2 (prec, m0, m1, NULL);
9687 mpfr_from_real (m0, ra0, GMP_RNDN);
9688 mpfr_from_real (m1, ra1, GMP_RNDN);
9689 mpfr_clear_flags ();
9690 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
9691 /* Remquo is independent of the rounding mode, so pass
9692 inexact=0 to do_mpfr_ckconv(). */
9693 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9694 mpfr_clears (m0, m1, NULL);
9695 if (result_rem)
9697 /* MPFR calculates quo in the host's long so it may
9698 return more bits in quo than the target int can hold
9699 if sizeof(host long) > sizeof(target int). This can
9700 happen even for native compilers in LP64 mode. In
9701 these cases, modulo the quo value with the largest
9702 number that the target int can hold while leaving one
9703 bit for the sign. */
9704 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9705 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9707 /* Dereference the quo pointer argument. */
9708 arg_quo = build_fold_indirect_ref (arg_quo);
9709 /* Proceed iff a valid pointer type was passed in. */
9710 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9712 /* Set the value. */
9713 tree result_quo
9714 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9715 build_int_cst (TREE_TYPE (arg_quo),
9716 integer_quo));
9717 TREE_SIDE_EFFECTS (result_quo) = 1;
9718 /* Combine the quo assignment with the rem. */
9719 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9720 result_quo, result_rem));
9725 return result;
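/* Editorial sketch (not part of builtins.c): the C99 remquo the fold
   above evaluates at compile time.  Only the low-order bits of the
   quotient are specified, which is why the host 'long' value is reduced
   to fit the target int.  */
#include <math.h>

static double
remquo_demo (void)
{
  int quo;
  double rem = remquo (29.0, 3.0, &quo);   /* rem == -1.0; quo holds the
					      low-order bits of 10.  */
  return rem + quo;
}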
9728 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9729 resulting value as a tree with type TYPE. The mpfr precision is
9730 set to the precision of TYPE. We assume that this mpfr function
9731 returns zero if the result could be calculated exactly within the
9732 requested precision. In addition, the integer pointer represented
9733 by ARG_SG will be dereferenced and set to the appropriate signgam
9734 (-1,1) value. */
9736 static tree
9737 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9739 tree result = NULL_TREE;
9741 STRIP_NOPS (arg);
9743 /* To proceed, MPFR must exactly represent the target floating point
9744 format, which only happens when the target base equals two. Also
9745 verify ARG is a constant and that ARG_SG is an int pointer. */
9746 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9747 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9748 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9749 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9751 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9753 /* In addition to NaN and Inf, the argument cannot be zero or a
9754 negative integer. */
9755 if (real_isfinite (ra)
9756 && ra->cl != rvc_zero
9757 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
9759 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9760 const int prec = fmt->p;
9761 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9762 int inexact, sg;
9763 mpfr_t m;
9764 tree result_lg;
9766 mpfr_init2 (m, prec);
9767 mpfr_from_real (m, ra, GMP_RNDN);
9768 mpfr_clear_flags ();
9769 inexact = mpfr_lgamma (m, &sg, m, rnd);
9770 result_lg = do_mpfr_ckconv (m, type, inexact);
9771 mpfr_clear (m);
9772 if (result_lg)
9774 tree result_sg;
9776 /* Dereference the arg_sg pointer argument. */
9777 arg_sg = build_fold_indirect_ref (arg_sg);
9778 /* Assign the signgam value into *arg_sg. */
9779 result_sg = fold_build2 (MODIFY_EXPR,
9780 TREE_TYPE (arg_sg), arg_sg,
9781 build_int_cst (TREE_TYPE (arg_sg), sg));
9782 TREE_SIDE_EFFECTS (result_sg) = 1;
9783 /* Combine the signgam assignment with the lgamma result. */
9784 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9785 result_sg, result_lg));
9790 return result;
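/* Editorial sketch (not part of builtins.c): the POSIX lgamma_r mirrored
   by the fold above; it returns log|Gamma(x)| and stores the sign of
   Gamma(x) through the pointer, just as *ARG_SG is set above.  */
#include <math.h>

static double
lgamma_r_demo (void)
{
  int sign;
  double lg = lgamma_r (-0.5, &sign);   /* Gamma(-0.5) < 0, so sign == -1 */
  return lg * sign;
}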
9793 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9794 mpc function FUNC on it and return the resulting value as a tree
9795 with type TYPE. The mpfr precision is set to the precision of
9796 TYPE. We assume that function FUNC returns zero if the result
9797 could be calculated exactly within the requested precision. If
9798 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9799 in the arguments and/or results. */
9801 tree
9802 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
9803 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9805 tree result = NULL_TREE;
9807 STRIP_NOPS (arg0);
9808 STRIP_NOPS (arg1);
9810 /* To proceed, MPFR must exactly represent the target floating point
9811 format, which only happens when the target base equals two. */
9812 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9813 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9814 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9815 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9816 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9818 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9819 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9820 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9821 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9823 if (do_nonfinite
9824 || (real_isfinite (re0) && real_isfinite (im0)
9825 && real_isfinite (re1) && real_isfinite (im1)))
9827 const struct real_format *const fmt =
9828 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9829 const int prec = fmt->p;
9830 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9831 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9832 int inexact;
9833 mpc_t m0, m1;
9835 mpc_init2 (m0, prec);
9836 mpc_init2 (m1, prec);
9837 mpfr_from_real (mpc_realref (m0), re0, rnd);
9838 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9839 mpfr_from_real (mpc_realref (m1), re1, rnd);
9840 mpfr_from_real (mpc_imagref (m1), im1, rnd);
9841 mpfr_clear_flags ();
9842 inexact = func (m0, m0, m1, crnd);
9843 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
9844 mpc_clear (m0);
9845 mpc_clear (m1);
9849 return result;
9852 /* A wrapper function for builtin folding that prevents warnings for
9853 "statement without effect" and the like, caused by removing the
9854 call node before the warning is generated. */
9856 tree
9857 fold_call_stmt (gcall *stmt, bool ignore)
9859 tree ret = NULL_TREE;
9860 tree fndecl = gimple_call_fndecl (stmt);
9861 location_t loc = gimple_location (stmt);
9862 if (fndecl
9863 && TREE_CODE (fndecl) == FUNCTION_DECL
9864 && DECL_BUILT_IN (fndecl)
9865 && !gimple_call_va_arg_pack_p (stmt))
9867 int nargs = gimple_call_num_args (stmt);
9868 tree *args = (nargs > 0
9869 ? gimple_call_arg_ptr (stmt, 0)
9870 : &error_mark_node);
9872 if (avoid_folding_inline_builtin (fndecl))
9873 return NULL_TREE;
9874 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9876 return targetm.fold_builtin (fndecl, nargs, args, ignore);
9878 else
9880 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9881 if (ret)
9883 /* Propagate location information from original call to
9884 expansion of builtin. Otherwise things like
9885 maybe_emit_chk_warning, that operate on the expansion
9886 of a builtin, will use the wrong location information. */
9887 if (gimple_has_location (stmt))
9889 tree realret = ret;
9890 if (TREE_CODE (ret) == NOP_EXPR)
9891 realret = TREE_OPERAND (ret, 0);
9892 if (CAN_HAVE_LOCATION_P (realret)
9893 && !EXPR_HAS_LOCATION (realret))
9894 SET_EXPR_LOCATION (realret, loc);
9895 return realret;
9897 return ret;
9901 return NULL_TREE;
9904 /* Look up the function in builtin_decl that corresponds to DECL
9905 and set ASMSPEC as its user assembler name. DECL must be a
9906 function decl that declares a builtin. */
9908 void
9909 set_builtin_user_assembler_name (tree decl, const char *asmspec)
9911 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9912 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9913 && asmspec != 0);
9915 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
9916 set_user_assembler_name (builtin, asmspec);
9918 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9919 && INT_TYPE_SIZE < BITS_PER_WORD)
9921 set_user_assembler_libfunc ("ffs", asmspec);
9922 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9923 "ffs");
9927 /* Return true if DECL is a builtin that expands to a constant or similarly
9928 simple code. */
9929 bool
9930 is_simple_builtin (tree decl)
9932 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9933 switch (DECL_FUNCTION_CODE (decl))
9935 /* Builtins that expand to constants. */
9936 case BUILT_IN_CONSTANT_P:
9937 case BUILT_IN_EXPECT:
9938 case BUILT_IN_OBJECT_SIZE:
9939 case BUILT_IN_UNREACHABLE:
9940 /* Simple register moves or loads from stack. */
9941 case BUILT_IN_ASSUME_ALIGNED:
9942 case BUILT_IN_RETURN_ADDRESS:
9943 case BUILT_IN_EXTRACT_RETURN_ADDR:
9944 case BUILT_IN_FROB_RETURN_ADDR:
9945 case BUILT_IN_RETURN:
9946 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9947 case BUILT_IN_FRAME_ADDRESS:
9948 case BUILT_IN_VA_END:
9949 case BUILT_IN_STACK_SAVE:
9950 case BUILT_IN_STACK_RESTORE:
9951 /* Exception state returns or moves registers around. */
9952 case BUILT_IN_EH_FILTER:
9953 case BUILT_IN_EH_POINTER:
9954 case BUILT_IN_EH_COPY_VALUES:
9955 return true;
9957 default:
9958 return false;
9961 return false;
9964 /* Return true if DECL is a builtin that is not expensive, i.e., one that is
9965 most probably expanded inline into reasonably simple code. This is a
9966 superset of is_simple_builtin. */
9967 bool
9968 is_inexpensive_builtin (tree decl)
9970 if (!decl)
9971 return false;
9972 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9973 return true;
9974 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9975 switch (DECL_FUNCTION_CODE (decl))
9977 case BUILT_IN_ABS:
9978 case BUILT_IN_ALLOCA:
9979 case BUILT_IN_ALLOCA_WITH_ALIGN:
9980 case BUILT_IN_BSWAP16:
9981 case BUILT_IN_BSWAP32:
9982 case BUILT_IN_BSWAP64:
9983 case BUILT_IN_CLZ:
9984 case BUILT_IN_CLZIMAX:
9985 case BUILT_IN_CLZL:
9986 case BUILT_IN_CLZLL:
9987 case BUILT_IN_CTZ:
9988 case BUILT_IN_CTZIMAX:
9989 case BUILT_IN_CTZL:
9990 case BUILT_IN_CTZLL:
9991 case BUILT_IN_FFS:
9992 case BUILT_IN_FFSIMAX:
9993 case BUILT_IN_FFSL:
9994 case BUILT_IN_FFSLL:
9995 case BUILT_IN_IMAXABS:
9996 case BUILT_IN_FINITE:
9997 case BUILT_IN_FINITEF:
9998 case BUILT_IN_FINITEL:
9999 case BUILT_IN_FINITED32:
10000 case BUILT_IN_FINITED64:
10001 case BUILT_IN_FINITED128:
10002 case BUILT_IN_FPCLASSIFY:
10003 case BUILT_IN_ISFINITE:
10004 case BUILT_IN_ISINF_SIGN:
10005 case BUILT_IN_ISINF:
10006 case BUILT_IN_ISINFF:
10007 case BUILT_IN_ISINFL:
10008 case BUILT_IN_ISINFD32:
10009 case BUILT_IN_ISINFD64:
10010 case BUILT_IN_ISINFD128:
10011 case BUILT_IN_ISNAN:
10012 case BUILT_IN_ISNANF:
10013 case BUILT_IN_ISNANL:
10014 case BUILT_IN_ISNAND32:
10015 case BUILT_IN_ISNAND64:
10016 case BUILT_IN_ISNAND128:
10017 case BUILT_IN_ISNORMAL:
10018 case BUILT_IN_ISGREATER:
10019 case BUILT_IN_ISGREATEREQUAL:
10020 case BUILT_IN_ISLESS:
10021 case BUILT_IN_ISLESSEQUAL:
10022 case BUILT_IN_ISLESSGREATER:
10023 case BUILT_IN_ISUNORDERED:
10024 case BUILT_IN_VA_ARG_PACK:
10025 case BUILT_IN_VA_ARG_PACK_LEN:
10026 case BUILT_IN_VA_COPY:
10027 case BUILT_IN_TRAP:
10028 case BUILT_IN_SAVEREGS:
10029 case BUILT_IN_POPCOUNTL:
10030 case BUILT_IN_POPCOUNTLL:
10031 case BUILT_IN_POPCOUNTIMAX:
10032 case BUILT_IN_POPCOUNT:
10033 case BUILT_IN_PARITYL:
10034 case BUILT_IN_PARITYLL:
10035 case BUILT_IN_PARITYIMAX:
10036 case BUILT_IN_PARITY:
10037 case BUILT_IN_LABS:
10038 case BUILT_IN_LLABS:
10039 case BUILT_IN_PREFETCH:
10040 case BUILT_IN_ACC_ON_DEVICE:
10041 return true;
10043 default:
10044 return is_simple_builtin (decl);
10047 return false;