/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "tree-chkp.h"
67 #include "rtl-chkp.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

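/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while a plain
   "memcpy" matches none of the reserved prefixes.  */
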
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

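/* As a worked example of the contract above: if the address of EXP is
   known to have the byte form 16*k + 6, then M is 128 bits and N is
   48 bits; M divides (address of EXP - N) and N < M, so the function
   stores 128 in *ALIGNP and 48 in *BITPOSP.  */
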
static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

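/* For instance, an SSA pointer whose ptr_info records an alignment of
   8 bytes and a misalignment of 4 bytes yields *ALIGNP == 64 and
   *BITPOSP == 32, with a false return value because SSA alignment
   information may be only an approximation.  */
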
bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be 1, 2, or 4 (a power of 2 less than 8).  Used by
   c_strlen.  */

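/* For example, string_length ("ab\0cd", 1, 5) returns 2.  With
   ELTSIZE == 2, a buffer holding the little-endian 16-bit elements
   "a\0b\0\0\0" scanned with MAXELTS == 3 also returns 2, since only
   the third element is an all-zero unit.  */
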
static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

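/* For example, given the constant argument &"foobar"[2], SRC resolves
   to the string "foobar" with a byte offset of 2, and the result is
   ssize_int (4).  */
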
tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

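/* For example, on a target where BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN
   are both false, c_readstr ("abcd", SImode) produces the constant
   0x64636261.  Note that once a NUL byte is seen, the remaining bytes
   of the mode are filled with zeros instead of being read from STR.  */
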
static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

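/* The buffer is laid out as follows: word 0 holds the frame pointer
   value, word 1 holds the address of RECEIVER_LABEL, and the words
   from offset 2 * GET_MODE_SIZE (Pmode) onwards hold the
   machine-dependent stack save area.  */
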
void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

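/* Return true if ITER has not yet run past the last call expression
   argument.  */
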
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

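/* For example, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) accepts exactly two pointer arguments and nothing else;
   expand_builtin_nonlocal_goto below uses precisely this pattern.  */
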
static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

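/* For example, __builtin_prefetch (p, 1, 3) requests a prefetch of *p
   for writing with maximal temporal locality; the checks below reject
   non-constant or out-of-range values for the last two arguments.  */
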
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

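/* The block layout computed here is: one pointer-sized word for the
   incoming arg-pointer, optionally one more for the structure value
   address, followed by every register that can carry an argument, each
   placed at the natural alignment of its mode.  */
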
static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1565 /* Perform an untyped call and save the state required to perform an
1566 untyped return of whatever value was returned by the given function. */
1568 static rtx
1569 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1571 int size, align, regno;
1572 machine_mode mode;
1573 rtx incoming_args, result, reg, dest, src;
1574 rtx_call_insn *call_insn;
1575 rtx old_stack_level = 0;
1576 rtx call_fusage = 0;
1577 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1579 arguments = convert_memory_address (Pmode, arguments);
1581 /* Create a block where the return registers can be saved. */
1582 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1584 /* Fetch the arg pointer from the ARGUMENTS block. */
1585 incoming_args = gen_reg_rtx (Pmode);
1586 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1587 if (!STACK_GROWS_DOWNWARD)
1588 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1589 incoming_args, 0, OPTAB_LIB_WIDEN);
1591 /* Push a new argument block and copy the arguments. Do not allow
1592 the (potential) memcpy call below to interfere with our stack
1593 manipulations. */
1594 do_pending_stack_adjust ();
1595 NO_DEFER_POP;
1597 /* Save the stack with nonlocal if available. */
1598 if (targetm.have_save_stack_nonlocal ())
1599 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1600 else
1601 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1603 /* Allocate a block of memory onto the stack and copy the memory
1604 arguments to the outgoing arguments address. We can pass TRUE
1605 as the 4th argument because we just saved the stack pointer
1606 and will restore it right after the call. */
1607 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1609 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1610 may have already set current_function_calls_alloca to true.
1611 current_function_calls_alloca won't be set if argsize is zero,
1612 so we have to guarantee need_drap is true here. */
1613 if (SUPPORTS_STACK_ALIGNMENT)
1614 crtl->need_drap = true;
1616 dest = virtual_outgoing_args_rtx;
1617 if (!STACK_GROWS_DOWNWARD)
1619 if (CONST_INT_P (argsize))
1620 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1621 else
1622 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1624 dest = gen_rtx_MEM (BLKmode, dest);
1625 set_mem_align (dest, PARM_BOUNDARY);
1626 src = gen_rtx_MEM (BLKmode, incoming_args);
1627 set_mem_align (src, PARM_BOUNDARY);
1628 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1630 /* Refer to the argument block. */
1631 apply_args_size ();
1632 arguments = gen_rtx_MEM (BLKmode, arguments);
1633 set_mem_align (arguments, PARM_BOUNDARY);
1635 /* Walk past the arg-pointer and structure value address. */
1636 size = GET_MODE_SIZE (Pmode);
1637 if (struct_value)
1638 size += GET_MODE_SIZE (Pmode);
1640 /* Restore each of the registers previously saved. Make USE insns
1641 for each of these registers for use in making the call. */
1642 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1643 if ((mode = apply_args_mode[regno]) != VOIDmode)
1645 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1646 if (size % align != 0)
1647 size = CEIL (size, align) * align;
1648 reg = gen_rtx_REG (mode, regno);
1649 emit_move_insn (reg, adjust_address (arguments, mode, size));
1650 use_reg (&call_fusage, reg);
1651 size += GET_MODE_SIZE (mode);
1654 /* Restore the structure value address unless this is passed as an
1655 "invisible" first argument. */
1656 size = GET_MODE_SIZE (Pmode);
1657 if (struct_value)
1659 rtx value = gen_reg_rtx (Pmode);
1660 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1661 emit_move_insn (struct_value, value);
1662 if (REG_P (struct_value))
1663 use_reg (&call_fusage, struct_value);
1664 size += GET_MODE_SIZE (Pmode);
1667 /* All arguments and registers used for the call are set up by now! */
1668 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1670 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1671 and we don't want to load it into a register as an optimization,
1672 because prepare_call_address already did it if it should be done. */
1673 if (GET_CODE (function) != SYMBOL_REF)
1674 function = memory_address (FUNCTION_MODE, function);
1676 /* Generate the actual call instruction and save the return value. */
1677 if (targetm.have_untyped_call ())
1679 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1680 emit_call_insn (targetm.gen_untyped_call (mem, result,
1681 result_vector (1, result)));
1683 else if (targetm.have_call_value ())
1685 rtx valreg = 0;
1687 /* Locate the unique return register. It is not possible to
1688 express a call that sets more than one return register using
1689 call_value; use untyped_call for that. In fact, untyped_call
1690 only needs to save the return registers in the given block. */
1691 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1692 if ((mode = apply_result_mode[regno]) != VOIDmode)
1694 gcc_assert (!valreg); /* have_untyped_call required. */
1696 valreg = gen_rtx_REG (mode, regno);
1699 emit_insn (targetm.gen_call_value (valreg,
1700 gen_rtx_MEM (FUNCTION_MODE, function),
1701 const0_rtx, NULL_RTX, const0_rtx));
1703 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1705 else
1706 gcc_unreachable ();
1708 /* Find the CALL insn we just emitted, and attach the register usage
1709 information. */
1710 call_insn = last_call_insn ();
1711 add_function_usage_to (call_insn, call_fusage);
1713 /* Restore the stack. */
1714 if (targetm.have_save_stack_nonlocal ())
1715 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1716 else
1717 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1718 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1720 OK_DEFER_POP;
1722 /* Return the address of the result block. */
1723 result = copy_addr_to_reg (XEXP (result, 0));
1724 return convert_memory_address (ptr_mode, result);
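/* Illustrative note (not in the original source): the expander above
   implements the GNU extension __builtin_apply.  A forwarding wrapper
   might use it as

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*)()) fn, args, 64);

   where 64 is a caller-chosen upper bound on the size of the argument
   block to copy; the returned pointer addresses the result block.  */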
1727 /* Perform an untyped return. */
1729 static void
1730 expand_builtin_return (rtx result)
1732 int size, align, regno;
1733 machine_mode mode;
1734 rtx reg;
1735 rtx_insn *call_fusage = 0;
1737 result = convert_memory_address (Pmode, result);
1739 apply_result_size ();
1740 result = gen_rtx_MEM (BLKmode, result);
1742 if (targetm.have_untyped_return ())
1744 rtx vector = result_vector (0, result);
1745 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1746 emit_barrier ();
1747 return;
1750 /* Restore the return value and note that each value is used. */
1751 size = 0;
1752 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1753 if ((mode = apply_result_mode[regno]) != VOIDmode)
1755 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1756 if (size % align != 0)
1757 size = CEIL (size, align) * align;
1758 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1759 emit_move_insn (reg, adjust_address (result, mode, size));
1761 push_to_sequence (call_fusage);
1762 emit_use (reg);
1763 call_fusage = get_insns ();
1764 end_sequence ();
1765 size += GET_MODE_SIZE (mode);
1768 /* Put the USE insns before the return. */
1769 emit_insn (call_fusage);
1771 /* Return whatever values were restored by jumping directly to the end
1772 of the function. */
1773 expand_naked_return ();
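/* Illustrative note (not in the original source): paired with
   __builtin_apply above, a wrapper can forward the callee's return
   value untouched:

     __builtin_return (__builtin_apply ((void (*)()) fn,
                                        __builtin_apply_args (), 64));  */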
1776 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1778 static enum type_class
1779 type_to_class (tree type)
1781 switch (TREE_CODE (type))
1783 case VOID_TYPE: return void_type_class;
1784 case INTEGER_TYPE: return integer_type_class;
1785 case ENUMERAL_TYPE: return enumeral_type_class;
1786 case BOOLEAN_TYPE: return boolean_type_class;
1787 case POINTER_TYPE: return pointer_type_class;
1788 case REFERENCE_TYPE: return reference_type_class;
1789 case OFFSET_TYPE: return offset_type_class;
1790 case REAL_TYPE: return real_type_class;
1791 case COMPLEX_TYPE: return complex_type_class;
1792 case FUNCTION_TYPE: return function_type_class;
1793 case METHOD_TYPE: return method_type_class;
1794 case RECORD_TYPE: return record_type_class;
1795 case UNION_TYPE:
1796 case QUAL_UNION_TYPE: return union_type_class;
1797 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1798 ? string_type_class : array_type_class);
1799 case LANG_TYPE: return lang_type_class;
1800 default: return no_type_class;
1804 /* Expand a call EXP to __builtin_classify_type. */
1806 static rtx
1807 expand_builtin_classify_type (tree exp)
1809 if (call_expr_nargs (exp))
1810 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1811 return GEN_INT (no_type_class);
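/* Illustrative example (not in the original source) of what the
   classification above yields:

     __builtin_classify_type (1)      is integer_type_class
     __builtin_classify_type (1.0)    is real_type_class
     __builtin_classify_type ("abc")  is pointer_type_class

   the last because an array argument decays to a pointer.  */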
1814 /* This helper macro, meant to be used in mathfn_built_in below, determines
1815 which among a set of builtin math functions is appropriate for a given type
1816 mode. The `F' (float) and `L' (long double) are automatically generated
1817 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1818 types, there are additional types that are considered with 'F32', 'F64',
1819 'F128', etc. suffixes. */
1820 #define CASE_MATHFN(MATHFN) \
1821 CASE_CFN_##MATHFN: \
1822 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1823 fcodel = BUILT_IN_##MATHFN##L ; break;
1824 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1825 types. */
1826 #define CASE_MATHFN_FLOATN(MATHFN) \
1827 CASE_CFN_##MATHFN: \
1828 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1829 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1830 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1831 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1832 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1833 break;
1834 /* Similar to above, but appends _R after any F/L suffix. */
1835 #define CASE_MATHFN_REENT(MATHFN) \
1836 case CFN_BUILT_IN_##MATHFN##_R: \
1837 case CFN_BUILT_IN_##MATHFN##F_R: \
1838 case CFN_BUILT_IN_##MATHFN##L_R: \
1839 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1840 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1842 /* Return a function equivalent to FN but operating on floating-point
1843 values of type TYPE, or END_BUILTINS if no such function exists.
1844 This is purely an operation on function codes; it does not guarantee
1845 that the target actually has an implementation of the function. */
1847 static built_in_function
1848 mathfn_built_in_2 (tree type, combined_fn fn)
1850 tree mtype;
1851 built_in_function fcode, fcodef, fcodel;
1852 built_in_function fcodef16 = END_BUILTINS;
1853 built_in_function fcodef32 = END_BUILTINS;
1854 built_in_function fcodef64 = END_BUILTINS;
1855 built_in_function fcodef128 = END_BUILTINS;
1856 built_in_function fcodef32x = END_BUILTINS;
1857 built_in_function fcodef64x = END_BUILTINS;
1858 built_in_function fcodef128x = END_BUILTINS;
1860 switch (fn)
1862 CASE_MATHFN (ACOS)
1863 CASE_MATHFN (ACOSH)
1864 CASE_MATHFN (ASIN)
1865 CASE_MATHFN (ASINH)
1866 CASE_MATHFN (ATAN)
1867 CASE_MATHFN (ATAN2)
1868 CASE_MATHFN (ATANH)
1869 CASE_MATHFN (CBRT)
1870 CASE_MATHFN (CEIL)
1871 CASE_MATHFN (CEXPI)
1872 CASE_MATHFN_FLOATN (COPYSIGN)
1873 CASE_MATHFN (COS)
1874 CASE_MATHFN (COSH)
1875 CASE_MATHFN (DREM)
1876 CASE_MATHFN (ERF)
1877 CASE_MATHFN (ERFC)
1878 CASE_MATHFN (EXP)
1879 CASE_MATHFN (EXP10)
1880 CASE_MATHFN (EXP2)
1881 CASE_MATHFN (EXPM1)
1882 CASE_MATHFN (FABS)
1883 CASE_MATHFN (FDIM)
1884 CASE_MATHFN (FLOOR)
1885 CASE_MATHFN_FLOATN (FMA)
1886 CASE_MATHFN_FLOATN (FMAX)
1887 CASE_MATHFN_FLOATN (FMIN)
1888 CASE_MATHFN (FMOD)
1889 CASE_MATHFN (FREXP)
1890 CASE_MATHFN (GAMMA)
1891 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1892 CASE_MATHFN (HUGE_VAL)
1893 CASE_MATHFN (HYPOT)
1894 CASE_MATHFN (ILOGB)
1895 CASE_MATHFN (ICEIL)
1896 CASE_MATHFN (IFLOOR)
1897 CASE_MATHFN (INF)
1898 CASE_MATHFN (IRINT)
1899 CASE_MATHFN (IROUND)
1900 CASE_MATHFN (ISINF)
1901 CASE_MATHFN (J0)
1902 CASE_MATHFN (J1)
1903 CASE_MATHFN (JN)
1904 CASE_MATHFN (LCEIL)
1905 CASE_MATHFN (LDEXP)
1906 CASE_MATHFN (LFLOOR)
1907 CASE_MATHFN (LGAMMA)
1908 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1909 CASE_MATHFN (LLCEIL)
1910 CASE_MATHFN (LLFLOOR)
1911 CASE_MATHFN (LLRINT)
1912 CASE_MATHFN (LLROUND)
1913 CASE_MATHFN (LOG)
1914 CASE_MATHFN (LOG10)
1915 CASE_MATHFN (LOG1P)
1916 CASE_MATHFN (LOG2)
1917 CASE_MATHFN (LOGB)
1918 CASE_MATHFN (LRINT)
1919 CASE_MATHFN (LROUND)
1920 CASE_MATHFN (MODF)
1921 CASE_MATHFN (NAN)
1922 CASE_MATHFN (NANS)
1923 CASE_MATHFN (NEARBYINT)
1924 CASE_MATHFN (NEXTAFTER)
1925 CASE_MATHFN (NEXTTOWARD)
1926 CASE_MATHFN (POW)
1927 CASE_MATHFN (POWI)
1928 CASE_MATHFN (POW10)
1929 CASE_MATHFN (REMAINDER)
1930 CASE_MATHFN (REMQUO)
1931 CASE_MATHFN (RINT)
1932 CASE_MATHFN (ROUND)
1933 CASE_MATHFN (SCALB)
1934 CASE_MATHFN (SCALBLN)
1935 CASE_MATHFN (SCALBN)
1936 CASE_MATHFN (SIGNBIT)
1937 CASE_MATHFN (SIGNIFICAND)
1938 CASE_MATHFN (SIN)
1939 CASE_MATHFN (SINCOS)
1940 CASE_MATHFN (SINH)
1941 CASE_MATHFN_FLOATN (SQRT)
1942 CASE_MATHFN (TAN)
1943 CASE_MATHFN (TANH)
1944 CASE_MATHFN (TGAMMA)
1945 CASE_MATHFN (TRUNC)
1946 CASE_MATHFN (Y0)
1947 CASE_MATHFN (Y1)
1948 CASE_MATHFN (YN)
1950 default:
1951 return END_BUILTINS;
1954 mtype = TYPE_MAIN_VARIANT (type);
1955 if (mtype == double_type_node)
1956 return fcode;
1957 else if (mtype == float_type_node)
1958 return fcodef;
1959 else if (mtype == long_double_type_node)
1960 return fcodel;
1961 else if (mtype == float16_type_node)
1962 return fcodef16;
1963 else if (mtype == float32_type_node)
1964 return fcodef32;
1965 else if (mtype == float64_type_node)
1966 return fcodef64;
1967 else if (mtype == float128_type_node)
1968 return fcodef128;
1969 else if (mtype == float32x_type_node)
1970 return fcodef32x;
1971 else if (mtype == float64x_type_node)
1972 return fcodef64x;
1973 else if (mtype == float128x_type_node)
1974 return fcodef128x;
1975 else
1976 return END_BUILTINS;
1979 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1980 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1981 otherwise use the explicit declaration. If we can't do the conversion,
1982 return null. */
1984 static tree
1985 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1987 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1988 if (fcode2 == END_BUILTINS)
1989 return NULL_TREE;
1991 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1992 return NULL_TREE;
1994 return builtin_decl_explicit (fcode2);
1997 /* Like mathfn_built_in_1, but always use the implicit array. */
1999 tree
2000 mathfn_built_in (tree type, combined_fn fn)
2002 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2005 /* Like mathfn_built_in_1, but take a built_in_function and
2006 always use the implicit array. */
2008 tree
2009 mathfn_built_in (tree type, enum built_in_function fn)
2011 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
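/* Illustrative example (not in the original source): asking for the
   float variant of sqrt,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   maps through mathfn_built_in_2 to BUILT_IN_SQRTF and returns the
   implicit declaration of sqrtf, or NULL_TREE if no implicit
   declaration is available.  */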
2014 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2015 return its code, otherwise return IFN_LAST. Note that this function
2016 only tests whether the function is defined in internal-fn.def, not whether
2017 it is actually available on the target. */
2019 internal_fn
2020 associated_internal_fn (tree fndecl)
2022 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2023 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2024 switch (DECL_FUNCTION_CODE (fndecl))
2026 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2027 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2028 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2029 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2030 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2031 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2032 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2033 #include "internal-fn.def"
2035 CASE_FLT_FN (BUILT_IN_POW10):
2036 return IFN_EXP10;
2038 CASE_FLT_FN (BUILT_IN_DREM):
2039 return IFN_REMAINDER;
2041 CASE_FLT_FN (BUILT_IN_SCALBN):
2042 CASE_FLT_FN (BUILT_IN_SCALBLN):
2043 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2044 return IFN_LDEXP;
2045 return IFN_LAST;
2047 default:
2048 return IFN_LAST;
2052 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2053 on the current target by a call to an internal function, return the
2054 code of that internal function, otherwise return IFN_LAST. The caller
2055 is responsible for ensuring that any side-effects of the built-in
2056 call are dealt with correctly. E.g. if CALL sets errno, the caller
2057 must decide that the errno result isn't needed or make it available
2058 in some other way. */
2060 internal_fn
2061 replacement_internal_fn (gcall *call)
2063 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2065 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2066 if (ifn != IFN_LAST)
2068 tree_pair types = direct_internal_fn_types (ifn, call);
2069 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2070 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2071 return ifn;
2074 return IFN_LAST;
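/* Illustrative example (not in the original source): for a gimple call

     y = __builtin_sqrt (x);

   replacement_internal_fn returns IFN_SQRT when the target supports a
   direct square-root instruction for the mode of x, allowing later
   passes to rewrite the call as y = .SQRT (x); otherwise IFN_LAST is
   returned and the library call stays.  */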
2077 /* Expand a call to the builtin ternary math functions (fma).
2078 Return NULL_RTX if a normal call should be emitted rather than expanding the
2079 function in-line. EXP is the expression that is a call to the builtin
2080 function; if convenient, the result should be placed in TARGET.
2081 SUBTARGET may be used as the target for computing one of EXP's
2082 operands. */
2084 static rtx
2085 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2087 optab builtin_optab;
2088 rtx op0, op1, op2, result;
2089 rtx_insn *insns;
2090 tree fndecl = get_callee_fndecl (exp);
2091 tree arg0, arg1, arg2;
2092 machine_mode mode;
2094 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2095 return NULL_RTX;
2097 arg0 = CALL_EXPR_ARG (exp, 0);
2098 arg1 = CALL_EXPR_ARG (exp, 1);
2099 arg2 = CALL_EXPR_ARG (exp, 2);
2101 switch (DECL_FUNCTION_CODE (fndecl))
2103 CASE_FLT_FN (BUILT_IN_FMA):
2104 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2105 builtin_optab = fma_optab; break;
2106 default:
2107 gcc_unreachable ();
2110 /* Make a suitable register to place result in. */
2111 mode = TYPE_MODE (TREE_TYPE (exp));
2113 /* Before working hard, check whether the instruction is available. */
2114 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2115 return NULL_RTX;
2117 result = gen_reg_rtx (mode);
2119 /* Always stabilize the argument list. */
2120 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2121 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2122 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2124 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2125 op1 = expand_normal (arg1);
2126 op2 = expand_normal (arg2);
2128 start_sequence ();
2130 /* Compute into RESULT.
2131 Set RESULT to wherever the result comes back. */
2132 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2133 result, 0);
2135 /* If we were unable to expand via the builtin, stop the sequence
2136 (without outputting the insns) and call to the library function
2137 with the stabilized argument list. */
2138 if (result == 0)
2140 end_sequence ();
2141 return expand_call (exp, target, target == const0_rtx);
2144 /* Output the entire sequence. */
2145 insns = get_insns ();
2146 end_sequence ();
2147 emit_insn (insns);
2149 return result;
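/* Illustrative example (not in the original source): on a target whose
   fma_optab has a DFmode handler,

     double r = __builtin_fma (a, b, c);

   expands through the code above to a single fused multiply-add insn;
   if expand_ternary_op fails, the expander instead emits a normal call
   to fma with the stabilized arguments.  */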
2152 /* Expand a call to the builtin sin and cos math functions.
2153 Return NULL_RTX if a normal call should be emitted rather than expanding the
2154 function in-line. EXP is the expression that is a call to the builtin
2155 function; if convenient, the result should be placed in TARGET.
2156 SUBTARGET may be used as the target for computing one of EXP's
2157 operands. */
2159 static rtx
2160 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2162 optab builtin_optab;
2163 rtx op0;
2164 rtx_insn *insns;
2165 tree fndecl = get_callee_fndecl (exp);
2166 machine_mode mode;
2167 tree arg;
2169 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2170 return NULL_RTX;
2172 arg = CALL_EXPR_ARG (exp, 0);
2174 switch (DECL_FUNCTION_CODE (fndecl))
2176 CASE_FLT_FN (BUILT_IN_SIN):
2177 CASE_FLT_FN (BUILT_IN_COS):
2178 builtin_optab = sincos_optab; break;
2179 default:
2180 gcc_unreachable ();
2183 /* Make a suitable register to place result in. */
2184 mode = TYPE_MODE (TREE_TYPE (exp));
2186 /* Check if the sincos insn is available; otherwise fall back
2187 to the sin or cos insn. */
2188 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2189 switch (DECL_FUNCTION_CODE (fndecl))
2191 CASE_FLT_FN (BUILT_IN_SIN):
2192 builtin_optab = sin_optab; break;
2193 CASE_FLT_FN (BUILT_IN_COS):
2194 builtin_optab = cos_optab; break;
2195 default:
2196 gcc_unreachable ();
2199 /* Before working hard, check whether the instruction is available. */
2200 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2202 rtx result = gen_reg_rtx (mode);
2204 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2205 need to expand the argument again. This way, we will not perform
2206 side-effects more than once. */
2207 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2209 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2211 start_sequence ();
2213 /* Compute into RESULT.
2214 Set RESULT to wherever the result comes back. */
2215 if (builtin_optab == sincos_optab)
2217 int ok;
2219 switch (DECL_FUNCTION_CODE (fndecl))
2221 CASE_FLT_FN (BUILT_IN_SIN):
2222 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2223 break;
2224 CASE_FLT_FN (BUILT_IN_COS):
2225 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2226 break;
2227 default:
2228 gcc_unreachable ();
2230 gcc_assert (ok);
2232 else
2233 result = expand_unop (mode, builtin_optab, op0, result, 0);
2235 if (result != 0)
2237 /* Output the entire sequence. */
2238 insns = get_insns ();
2239 end_sequence ();
2240 emit_insn (insns);
2241 return result;
2244 /* If we were unable to expand via the builtin, stop the sequence
2245 (without outputting the insns) and call to the library function
2246 with the stabilized argument list. */
2247 end_sequence ();
2250 return expand_call (exp, target, target == const0_rtx);
2253 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2254 return an RTL instruction code that implements the functionality.
2255 If that isn't possible or available return CODE_FOR_nothing. */
2257 static enum insn_code
2258 interclass_mathfn_icode (tree arg, tree fndecl)
2260 bool errno_set = false;
2261 optab builtin_optab = unknown_optab;
2262 machine_mode mode;
2264 switch (DECL_FUNCTION_CODE (fndecl))
2266 CASE_FLT_FN (BUILT_IN_ILOGB):
2267 errno_set = true; builtin_optab = ilogb_optab; break;
2268 CASE_FLT_FN (BUILT_IN_ISINF):
2269 builtin_optab = isinf_optab; break;
2270 case BUILT_IN_ISNORMAL:
2271 case BUILT_IN_ISFINITE:
2272 CASE_FLT_FN (BUILT_IN_FINITE):
2273 case BUILT_IN_FINITED32:
2274 case BUILT_IN_FINITED64:
2275 case BUILT_IN_FINITED128:
2276 case BUILT_IN_ISINFD32:
2277 case BUILT_IN_ISINFD64:
2278 case BUILT_IN_ISINFD128:
2279 /* These builtins have no optabs (yet). */
2280 break;
2281 default:
2282 gcc_unreachable ();
2285 /* There's no easy way to detect the case we need to set EDOM. */
2286 if (flag_errno_math && errno_set)
2287 return CODE_FOR_nothing;
2289 /* Optab mode depends on the mode of the input argument. */
2290 mode = TYPE_MODE (TREE_TYPE (arg));
2292 if (builtin_optab)
2293 return optab_handler (builtin_optab, mode);
2294 return CODE_FOR_nothing;
2297 /* Expand a call to one of the builtin math functions that operate on
2298 a floating-point argument and output an integer result (ilogb, isinf,
2299 isnan, etc).
2300 Return 0 if a normal call should be emitted rather than expanding the
2301 function in-line. EXP is the expression that is a call to the builtin
2302 function; if convenient, the result should be placed in TARGET. */
2304 static rtx
2305 expand_builtin_interclass_mathfn (tree exp, rtx target)
2307 enum insn_code icode = CODE_FOR_nothing;
2308 rtx op0;
2309 tree fndecl = get_callee_fndecl (exp);
2310 machine_mode mode;
2311 tree arg;
2313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2314 return NULL_RTX;
2316 arg = CALL_EXPR_ARG (exp, 0);
2317 icode = interclass_mathfn_icode (arg, fndecl);
2318 mode = TYPE_MODE (TREE_TYPE (arg));
2320 if (icode != CODE_FOR_nothing)
2322 struct expand_operand ops[1];
2323 rtx_insn *last = get_last_insn ();
2324 tree orig_arg = arg;
2326 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2327 need to expand the argument again. This way, we will not perform
2328 side-effects more than once. */
2329 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2331 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2333 if (mode != GET_MODE (op0))
2334 op0 = convert_to_mode (mode, op0, 0);
2336 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2337 if (maybe_legitimize_operands (icode, 0, 1, ops)
2338 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2339 return ops[0].value;
2341 delete_insns_since (last);
2342 CALL_EXPR_ARG (exp, 0) = orig_arg;
2345 return NULL_RTX;
2348 /* Expand a call to the builtin sincos math function.
2349 Return NULL_RTX if a normal call should be emitted rather than expanding the
2350 function in-line. EXP is the expression that is a call to the builtin
2351 function. */
2353 static rtx
2354 expand_builtin_sincos (tree exp)
2356 rtx op0, op1, op2, target1, target2;
2357 machine_mode mode;
2358 tree arg, sinp, cosp;
2359 int result;
2360 location_t loc = EXPR_LOCATION (exp);
2361 tree alias_type, alias_off;
2363 if (!validate_arglist (exp, REAL_TYPE,
2364 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2365 return NULL_RTX;
2367 arg = CALL_EXPR_ARG (exp, 0);
2368 sinp = CALL_EXPR_ARG (exp, 1);
2369 cosp = CALL_EXPR_ARG (exp, 2);
2371 /* Make a suitable register to place result in. */
2372 mode = TYPE_MODE (TREE_TYPE (arg));
2374 /* Check if the sincos insn is available; otherwise emit the call. */
2375 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2376 return NULL_RTX;
2378 target1 = gen_reg_rtx (mode);
2379 target2 = gen_reg_rtx (mode);
2381 op0 = expand_normal (arg);
2382 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2383 alias_off = build_int_cst (alias_type, 0);
2384 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2385 sinp, alias_off));
2386 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2387 cosp, alias_off));
2389 /* Compute into target1 and target2.
2390 Set TARGET to wherever the result comes back. */
2391 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2392 gcc_assert (result);
2394 /* Move target1 and target2 to the memory locations indicated
2395 by op1 and op2. */
2396 emit_move_insn (op1, target1);
2397 emit_move_insn (op2, target2);
2399 return const0_rtx;
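/* Illustrative example (not in the original source): when sincos_optab
   has a handler for the argument's mode,

     double s, c;
     sincos (x, &s, &c);

   expands to a single two-value insn computing both results, and the
   two emit_move_insn calls above store them through *sinp and *cosp.  */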
2402 /* Expand a call to the internal cexpi builtin to the sincos math function.
2403 EXP is the expression that is a call to the builtin function; if convenient,
2404 the result should be placed in TARGET. */
2406 static rtx
2407 expand_builtin_cexpi (tree exp, rtx target)
2409 tree fndecl = get_callee_fndecl (exp);
2410 tree arg, type;
2411 machine_mode mode;
2412 rtx op0, op1, op2;
2413 location_t loc = EXPR_LOCATION (exp);
2415 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2416 return NULL_RTX;
2418 arg = CALL_EXPR_ARG (exp, 0);
2419 type = TREE_TYPE (arg);
2420 mode = TYPE_MODE (TREE_TYPE (arg));
2422 /* Try expanding via a sincos optab; fall back to emitting a libcall
2423 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2424 is only generated from sincos or cexp, or when either of them is available. */
2425 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2427 op1 = gen_reg_rtx (mode);
2428 op2 = gen_reg_rtx (mode);
2430 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2432 /* Compute into op1 and op2. */
2433 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2435 else if (targetm.libc_has_function (function_sincos))
2437 tree call, fn = NULL_TREE;
2438 tree top1, top2;
2439 rtx op1a, op2a;
2441 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2442 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2443 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2444 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2445 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2446 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2447 else
2448 gcc_unreachable ();
2450 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2451 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2452 op1a = copy_addr_to_reg (XEXP (op1, 0));
2453 op2a = copy_addr_to_reg (XEXP (op2, 0));
2454 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2455 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2457 /* Make sure not to fold the sincos call again. */
2458 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2459 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2460 call, 3, arg, top1, top2));
2462 else
2464 tree call, fn = NULL_TREE, narg;
2465 tree ctype = build_complex_type (type);
2467 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2468 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2469 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2470 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2471 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2472 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2473 else
2474 gcc_unreachable ();
2476 /* If we don't have a decl for cexp, create one. This is the
2477 friendliest fallback if the user calls __builtin_cexpi
2478 without full C99 function support on the target. */
2479 if (fn == NULL_TREE)
2481 tree fntype;
2482 const char *name = NULL;
2484 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2485 name = "cexpf";
2486 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2487 name = "cexp";
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2489 name = "cexpl";
2491 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2492 fn = build_fn_decl (name, fntype);
2495 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2496 build_real (type, dconst0), arg);
2498 /* Make sure not to fold the cexp call again. */
2499 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2500 return expand_expr (build_call_nary (ctype, call, 1, narg),
2501 target, VOIDmode, EXPAND_NORMAL);
2504 /* Now build the proper return type. */
2505 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2506 make_tree (TREE_TYPE (arg), op2),
2507 make_tree (TREE_TYPE (arg), op1)),
2508 target, VOIDmode, EXPAND_NORMAL);
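/* Illustrative sketch (not in the original source): cexpi is an
   internal builtin, typically created when separate sin and cos calls
   on the same operand are combined.  Conceptually the fallback paths
   above rewrite

     __builtin_cexpi (x)

   either as sincos (x, &s, &c) followed by building the complex value
   c + I*s, or, when libc lacks sincos, as cexp (0.0 + I*x).  */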
2511 /* Conveniently construct a function call expression. FNDECL names the
2512 function to be called, N is the number of arguments, and the "..."
2513 parameters are the argument expressions. Unlike build_call_expr
2514 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2516 static tree
2517 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2519 va_list ap;
2520 tree fntype = TREE_TYPE (fndecl);
2521 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2523 va_start (ap, n);
2524 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2525 va_end (ap);
2526 SET_EXPR_LOCATION (fn, loc);
2527 return fn;
2530 /* Expand a call to one of the builtin rounding functions gcc defines
2531 as an extension (lfloor and lceil). As these are gcc extensions we
2532 do not need to worry about setting errno to EDOM.
2533 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2534 EXP is the expression that is a call to the builtin function;
2535 if convenient, the result should be placed in TARGET. */
2537 static rtx
2538 expand_builtin_int_roundingfn (tree exp, rtx target)
2540 convert_optab builtin_optab;
2541 rtx op0, tmp;
2542 rtx_insn *insns;
2543 tree fndecl = get_callee_fndecl (exp);
2544 enum built_in_function fallback_fn;
2545 tree fallback_fndecl;
2546 machine_mode mode;
2547 tree arg;
2549 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2550 gcc_unreachable ();
2552 arg = CALL_EXPR_ARG (exp, 0);
2554 switch (DECL_FUNCTION_CODE (fndecl))
2556 CASE_FLT_FN (BUILT_IN_ICEIL):
2557 CASE_FLT_FN (BUILT_IN_LCEIL):
2558 CASE_FLT_FN (BUILT_IN_LLCEIL):
2559 builtin_optab = lceil_optab;
2560 fallback_fn = BUILT_IN_CEIL;
2561 break;
2563 CASE_FLT_FN (BUILT_IN_IFLOOR):
2564 CASE_FLT_FN (BUILT_IN_LFLOOR):
2565 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2566 builtin_optab = lfloor_optab;
2567 fallback_fn = BUILT_IN_FLOOR;
2568 break;
2570 default:
2571 gcc_unreachable ();
2574 /* Make a suitable register to place result in. */
2575 mode = TYPE_MODE (TREE_TYPE (exp));
2577 target = gen_reg_rtx (mode);
2579 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2580 need to expand the argument again. This way, we will not perform
2581 side-effects more than once. */
2582 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2584 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2586 start_sequence ();
2588 /* Compute into TARGET. */
2589 if (expand_sfix_optab (target, op0, builtin_optab))
2591 /* Output the entire sequence. */
2592 insns = get_insns ();
2593 end_sequence ();
2594 emit_insn (insns);
2595 return target;
2598 /* If we were unable to expand via the builtin, stop the sequence
2599 (without outputting the insns). */
2600 end_sequence ();
2602 /* Fall back to floating point rounding optab. */
2603 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2605 /* For non-C99 targets we may end up without a fallback fndecl here
2606 if the user called __builtin_lfloor directly. In this case emit
2607 a call to the floor/ceil variants nevertheless. This should result
2608 in the best user experience for targets without full C99 support. */
2609 if (fallback_fndecl == NULL_TREE)
2611 tree fntype;
2612 const char *name = NULL;
2614 switch (DECL_FUNCTION_CODE (fndecl))
2616 case BUILT_IN_ICEIL:
2617 case BUILT_IN_LCEIL:
2618 case BUILT_IN_LLCEIL:
2619 name = "ceil";
2620 break;
2621 case BUILT_IN_ICEILF:
2622 case BUILT_IN_LCEILF:
2623 case BUILT_IN_LLCEILF:
2624 name = "ceilf";
2625 break;
2626 case BUILT_IN_ICEILL:
2627 case BUILT_IN_LCEILL:
2628 case BUILT_IN_LLCEILL:
2629 name = "ceill";
2630 break;
2631 case BUILT_IN_IFLOOR:
2632 case BUILT_IN_LFLOOR:
2633 case BUILT_IN_LLFLOOR:
2634 name = "floor";
2635 break;
2636 case BUILT_IN_IFLOORF:
2637 case BUILT_IN_LFLOORF:
2638 case BUILT_IN_LLFLOORF:
2639 name = "floorf";
2640 break;
2641 case BUILT_IN_IFLOORL:
2642 case BUILT_IN_LFLOORL:
2643 case BUILT_IN_LLFLOORL:
2644 name = "floorl";
2645 break;
2646 default:
2647 gcc_unreachable ();
2650 fntype = build_function_type_list (TREE_TYPE (arg),
2651 TREE_TYPE (arg), NULL_TREE);
2652 fallback_fndecl = build_fn_decl (name, fntype);
2655 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2657 tmp = expand_normal (exp);
2658 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2660 /* Truncate the result of floating point optab to integer
2661 via expand_fix (). */
2662 target = gen_reg_rtx (mode);
2663 expand_fix (target, tmp, 0);
2665 return target;
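/* Illustrative example (not in the original source): if the target has
   no lfloor pattern,

     long l = __builtin_lfloor (x);

   is lowered by the fallback path above to roughly

     long l = (long) floor (x);

   which is acceptable because lfloor is a GCC extension and need not
   set errno.  */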
2668 /* Expand a call to one of the builtin math functions doing integer
2669 conversion (lrint).
2670 Return 0 if a normal call should be emitted rather than expanding the
2671 function in-line. EXP is the expression that is a call to the builtin
2672 function; if convenient, the result should be placed in TARGET. */
2674 static rtx
2675 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2677 convert_optab builtin_optab;
2678 rtx op0;
2679 rtx_insn *insns;
2680 tree fndecl = get_callee_fndecl (exp);
2681 tree arg;
2682 machine_mode mode;
2683 enum built_in_function fallback_fn = BUILT_IN_NONE;
2685 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2686 gcc_unreachable ();
2688 arg = CALL_EXPR_ARG (exp, 0);
2690 switch (DECL_FUNCTION_CODE (fndecl))
2692 CASE_FLT_FN (BUILT_IN_IRINT):
2693 fallback_fn = BUILT_IN_LRINT;
2694 gcc_fallthrough ();
2695 CASE_FLT_FN (BUILT_IN_LRINT):
2696 CASE_FLT_FN (BUILT_IN_LLRINT):
2697 builtin_optab = lrint_optab;
2698 break;
2700 CASE_FLT_FN (BUILT_IN_IROUND):
2701 fallback_fn = BUILT_IN_LROUND;
2702 gcc_fallthrough ();
2703 CASE_FLT_FN (BUILT_IN_LROUND):
2704 CASE_FLT_FN (BUILT_IN_LLROUND):
2705 builtin_optab = lround_optab;
2706 break;
2708 default:
2709 gcc_unreachable ();
2712 /* There's no easy way to detect the case we need to set EDOM. */
2713 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2714 return NULL_RTX;
2716 /* Make a suitable register to place result in. */
2717 mode = TYPE_MODE (TREE_TYPE (exp));
2719 /* There's no easy way to detect the case we need to set EDOM. */
2720 if (!flag_errno_math)
2722 rtx result = gen_reg_rtx (mode);
2724 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725 need to expand the argument again. This way, we will not perform
2726 side-effects more than once. */
2727 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2729 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2731 start_sequence ();
2733 if (expand_sfix_optab (result, op0, builtin_optab))
2735 /* Output the entire sequence. */
2736 insns = get_insns ();
2737 end_sequence ();
2738 emit_insn (insns);
2739 return result;
2742 /* If we were unable to expand via the builtin, stop the sequence
2743 (without outputting the insns) and call to the library function
2744 with the stabilized argument list. */
2745 end_sequence ();
2748 if (fallback_fn != BUILT_IN_NONE)
2750 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2751 targets, (int) round (x) should never be transformed into
2752 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2753 a call to lround in the hope that the target provides at least some
2754 C99 functions. This should result in the best user experience for
2755 targets without full C99 support. */
2756 tree fallback_fndecl = mathfn_built_in_1
2757 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2759 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2760 fallback_fndecl, 1, arg);
2762 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2763 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2764 return convert_to_mode (mode, target, 0);
2767 return expand_call (exp, target, target == const0_rtx);
2770 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2771 a normal call should be emitted rather than expanding the function
2772 in-line. EXP is the expression that is a call to the builtin
2773 function; if convenient, the result should be placed in TARGET. */
2775 static rtx
2776 expand_builtin_powi (tree exp, rtx target)
2778 tree arg0, arg1;
2779 rtx op0, op1;
2780 machine_mode mode;
2781 machine_mode mode2;
2783 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2784 return NULL_RTX;
2786 arg0 = CALL_EXPR_ARG (exp, 0);
2787 arg1 = CALL_EXPR_ARG (exp, 1);
2788 mode = TYPE_MODE (TREE_TYPE (exp));
2790 /* Emit a libcall to libgcc. */
2792 /* Mode of the 2nd argument must match that of an int. */
2793 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2795 if (target == NULL_RTX)
2796 target = gen_reg_rtx (mode);
2798 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2799 if (GET_MODE (op0) != mode)
2800 op0 = convert_to_mode (mode, op0, 0);
2801 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2802 if (GET_MODE (op1) != mode2)
2803 op1 = convert_to_mode (mode2, op1, 0);
2805 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2806 target, LCT_CONST, mode,
2807 op0, mode, op1, mode2);
2809 return target;
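/* Illustrative example (not in the original source): for a DFmode
   argument the powi_optab libfunc is __powidf2 from libgcc, so

     double r = __builtin_powi (x, n);

   becomes the libcall r = __powidf2 (x, n) via the code above.  */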
2812 /* Expand expression EXP which is a call to the strlen builtin. Return
2813 NULL_RTX if we failed; the caller should emit a normal call, otherwise
2814 try to get the result in TARGET, if convenient. */
2816 static rtx
2817 expand_builtin_strlen (tree exp, rtx target,
2818 machine_mode target_mode)
2820 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2821 return NULL_RTX;
2822 else
2824 struct expand_operand ops[4];
2825 rtx pat;
2826 tree len;
2827 tree src = CALL_EXPR_ARG (exp, 0);
2828 rtx src_reg;
2829 rtx_insn *before_strlen;
2830 machine_mode insn_mode;
2831 enum insn_code icode = CODE_FOR_nothing;
2832 unsigned int align;
2834 /* If the length can be computed at compile-time, return it. */
2835 len = c_strlen (src, 0);
2836 if (len)
2837 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2839 /* If the length can be computed at compile-time and is a constant
2840 integer, but there are side-effects in src, evaluate
2841 src for side-effects, then return len.
2842 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2843 can be optimized into: i++; x = 3; */
2844 len = c_strlen (src, 1);
2845 if (len && TREE_CODE (len) == INTEGER_CST)
2847 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2851 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2853 /* If SRC is not a pointer type, don't do this operation inline. */
2854 if (align == 0)
2855 return NULL_RTX;
2857 /* Bail out if we can't compute strlen in the right mode. */
2858 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2860 icode = optab_handler (strlen_optab, insn_mode);
2861 if (icode != CODE_FOR_nothing)
2862 break;
2864 if (insn_mode == VOIDmode)
2865 return NULL_RTX;
2867 /* Make a place to hold the source address. We will not expand
2868 the actual source until we are sure that the expansion will
2869 not fail -- there are trees that cannot be expanded twice. */
2870 src_reg = gen_reg_rtx (Pmode);
2872 /* Mark the beginning of the strlen sequence so we can emit the
2873 source operand later. */
2874 before_strlen = get_last_insn ();
2876 create_output_operand (&ops[0], target, insn_mode);
2877 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2878 create_integer_operand (&ops[2], 0);
2879 create_integer_operand (&ops[3], align);
2880 if (!maybe_expand_insn (icode, 4, ops))
2881 return NULL_RTX;
2883 /* Check to see if the argument was declared attribute nonstring
2884 and if so, issue a warning since at this point it's not known
2885 to be nul-terminated. */
2886 maybe_warn_nonstring_arg (TREE_OPERAND (CALL_EXPR_FN (exp), 0), exp);
2888 /* Now that we are assured of success, expand the source. */
2889 start_sequence ();
2890 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2891 if (pat != src_reg)
2893 #ifdef POINTERS_EXTEND_UNSIGNED
2894 if (GET_MODE (pat) != Pmode)
2895 pat = convert_to_mode (Pmode, pat,
2896 POINTERS_EXTEND_UNSIGNED);
2897 #endif
2898 emit_move_insn (src_reg, pat);
2900 pat = get_insns ();
2901 end_sequence ();
2903 if (before_strlen)
2904 emit_insn_after (pat, before_strlen);
2905 else
2906 emit_insn_before (pat, get_insns ());
2908 /* Return the value in the proper mode for this function. */
2909 if (GET_MODE (ops[0].value) == target_mode)
2910 target = ops[0].value;
2911 else if (target != 0)
2912 convert_move (target, ops[0].value, 0);
2913 else
2914 target = convert_to_mode (target_mode, ops[0].value, 0);
2916 return target;
2920 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2921 bytes from the constant string DATA + OFFSET and return them as a target
2922 constant. */
2924 static rtx
2925 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2926 scalar_int_mode mode)
2928 const char *str = (const char *) data;
2930 gcc_assert (offset >= 0
2931 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2932 <= strlen (str) + 1));
2934 return c_readstr (str + offset, mode);
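/* Illustrative example (not in the original source): with DATA pointing
   at "hello", OFFSET 1 and a 4-byte integer mode, the callback above
   returns the target constant holding the bytes "ello".  */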
2937 /* LEN specifies the length of the block of the memcpy/memset operation.
2938 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2939 In some cases we can make a very likely guess at the maximum size,
2940 in which case we set it into PROBABLE_MAX_SIZE. */
2942 static void
2943 determine_block_size (tree len, rtx len_rtx,
2944 unsigned HOST_WIDE_INT *min_size,
2945 unsigned HOST_WIDE_INT *max_size,
2946 unsigned HOST_WIDE_INT *probable_max_size)
2948 if (CONST_INT_P (len_rtx))
2950 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2951 return;
2953 else
2955 wide_int min, max;
2956 enum value_range_type range_type = VR_UNDEFINED;
2958 /* Determine bounds from the type. */
2959 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2960 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2961 else
2962 *min_size = 0;
2963 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2964 *probable_max_size = *max_size
2965 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2966 else
2967 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2969 if (TREE_CODE (len) == SSA_NAME)
2970 range_type = get_range_info (len, &min, &max);
2971 if (range_type == VR_RANGE)
2973 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2974 *min_size = min.to_uhwi ();
2975 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2976 *probable_max_size = *max_size = max.to_uhwi ();
2978 else if (range_type == VR_ANTI_RANGE)
2980 /* An anti-range 0...N lets us determine the minimal size to be N+1. */
2981 if (min == 0)
2983 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2984 *min_size = max.to_uhwi () + 1;
2986 /* Code like
2988 int n;
2989 if (n < 100)
2990 memcpy (a, b, n)
2992 produces an anti-range allowing negative values of N. We can
2993 still use the information and guess that N is not negative. */
2995 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2996 *probable_max_size = min.to_uhwi () - 1;
2999 gcc_checking_assert (*max_size <=
3000 (unsigned HOST_WIDE_INT)
3001 GET_MODE_MASK (GET_MODE (len_rtx)));
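/* Illustrative example (not in the original source): if LEN is an SSA
   name with value range [16, 64], the code above sets *min_size to 16
   and both *max_size and *probable_max_size to 64.  For the signed-n
   pattern in the comment above, an anti-range excluding [100, INT_MAX]
   leaves *max_size huge but lets us guess *probable_max_size = 99.  */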
3004 /* Try to verify that the sizes and lengths of the arguments to a string
3005 manipulation function given by EXP are within valid bounds and that
3006 the operation does not lead to buffer overflow. Arguments other than
3007 EXP may be null. When non-null, the arguments have the following
3008 meaning:
3009 SIZE is the user-supplied size argument to the function (such as in
3010 memcpy(d, s, SIZE) or strncpy(d, s, SIZE)). It specifies the exact
3011 number of bytes to write.
3012 MAXLEN is the user-supplied bound on the length of the source sequence
3013 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3014 of bytes to write.
3015 SRC is the source string (such as in strcpy(d, s)) when the expression
3016 EXP is a string function call (as opposed to a memory call like memcpy).
3017 As an exception, SRC can also be an integer denoting the precomputed
3018 size of the source string or object (for functions like memcpy).
3019 OBJSIZE is the size of the destination object specified by the last
3020 argument to the _chk builtins, typically resulting from the expansion
3021 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
3022 OBJSIZE).
3024 When SIZE is null LEN is checked to verify that it doesn't exceed
3025 SIZE_MAX.
3027 If the call is successfully verified as safe from buffer overflow,
3028 the function returns true, otherwise false. */
3030 static bool
3031 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
3033 /* The size of the largest object is half the address space, or
3034 SSIZE_MAX. (This is way too permissive.) */
3035 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3037 tree slen = NULL_TREE;
3039 tree range[2] = { NULL_TREE, NULL_TREE };
3041 /* Set to true when the exact number of bytes written by a string
3042 function like strcpy is not known and the only thing that is
3043 known is that it must be at least one (for the terminating nul). */
3044 bool at_least_one = false;
3045 if (src)
3047 /* SRC is normally a pointer to a string but as a special case
3048 it can be an integer denoting the length of a string. */
3049 if (POINTER_TYPE_P (TREE_TYPE (src)))
3051 /* Try to determine the range of lengths the source string
3052 refers to. If it can be determined and is less than
3053 the upper bound given by MAXLEN add one to it for
3054 the terminating nul. Otherwise, set it to one for
3055 the same reason, or to MAXLEN as appropriate. */
3056 get_range_strlen (src, range);
3057 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3059 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3060 range[0] = range[1] = maxlen;
3061 else
3062 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3063 range[0], size_one_node);
3065 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3066 range[1] = maxlen;
3067 else if (!integer_all_onesp (range[1]))
3068 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3069 range[1], size_one_node);
3071 slen = range[0];
3073 else
3075 at_least_one = true;
3076 slen = size_one_node;
3079 else
3080 slen = src;
3083 if (!size && !maxlen)
3085 /* When the only available piece of data is the object size
3086 there is nothing to do. */
3087 if (!slen)
3088 return true;
3090 /* Otherwise, when the length of the source sequence is known
3091 (as with strlen), set SIZE to it. */
3092 if (!range[0])
3093 size = slen;
3096 if (!objsize)
3097 objsize = maxobjsize;
3099 /* The SIZE is exact if it's non-null, constant, and in range of
3100 unsigned HOST_WIDE_INT. */
3101 bool exactsize = size && tree_fits_uhwi_p (size);
3103 if (size)
3104 get_size_range (size, range);
3106 /* First check the number of bytes to be written against the maximum
3107 object size. */
3108 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3110 location_t loc = tree_nonartificial_location (exp);
3111 loc = expansion_point_location_if_in_system_header (loc);
3113 if (range[0] == range[1])
3114 warning_at (loc, opt,
3115 "%K%qD specified size %E "
3116 "exceeds maximum object size %E",
3117 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3118 else
3119 warning_at (loc, opt,
3120 "%K%qD specified size between %E and %E "
3121 "exceeds maximum object size %E",
3122 exp, get_callee_fndecl (exp),
3123 range[0], range[1], maxobjsize);
3124 return false;
3127 /* Next check the number of bytes to be written against the destination
3128 object size. */
3129 if (range[0] || !exactsize || integer_all_onesp (size))
3131 if (range[0]
3132 && ((tree_fits_uhwi_p (objsize)
3133 && tree_int_cst_lt (objsize, range[0]))
3134 || (tree_fits_uhwi_p (size)
3135 && tree_int_cst_lt (size, range[0]))))
3137 location_t loc = tree_nonartificial_location (exp);
3138 loc = expansion_point_location_if_in_system_header (loc);
3140 if (size == slen && at_least_one)
3142 /* This is a call to strcpy with a destination of 0 size
3143 and a source of unknown length. The call will write
3144 at least one byte past the end of the destination. */
3145 warning_at (loc, opt,
3146 "%K%qD writing %E or more bytes into a region "
3147 "of size %E overflows the destination",
3148 exp, get_callee_fndecl (exp), range[0], objsize);
3150 else if (tree_int_cst_equal (range[0], range[1]))
3151 warning_at (loc, opt,
3152 (integer_onep (range[0])
3153 ? G_("%K%qD writing %E byte into a region "
3154 "of size %E overflows the destination")
3155 : G_("%K%qD writing %E bytes into a region "
3156 "of size %E overflows the destination")),
3157 exp, get_callee_fndecl (exp), range[0], objsize);
3158 else if (tree_int_cst_sign_bit (range[1]))
3160 /* Avoid printing the upper bound if it's invalid. */
3161 warning_at (loc, opt,
3162 "%K%qD writing %E or more bytes into a region "
3163 "of size %E overflows the destination",
3164 exp, get_callee_fndecl (exp), range[0], objsize);
3166 else
3167 warning_at (loc, opt,
3168 "%K%qD writing between %E and %E bytes into "
3169 "a region of size %E overflows the destination",
3170 exp, get_callee_fndecl (exp), range[0], range[1],
3171 objsize);
3173 /* Return error when an overflow has been detected. */
3174 return false;
3178 /* Check the maximum length of the source sequence against the size
3179 of the destination object if known, or against the maximum size
3180 of an object. */
3181 if (maxlen)
3183 get_size_range (maxlen, range);
3185 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3187 location_t loc = tree_nonartificial_location (exp);
3188 loc = expansion_point_location_if_in_system_header (loc);
3190 if (tree_int_cst_lt (maxobjsize, range[0]))
3192 /* Warn about crazy big sizes first since that's more
3193 likely to be meaningful than saying that the bound
3194 is greater than the object size if both are big. */
3195 if (range[0] == range[1])
3196 warning_at (loc, opt,
3197 "%K%qD specified bound %E "
3198 "exceeds maximum object size %E",
3199 exp, get_callee_fndecl (exp),
3200 range[0], maxobjsize);
3201 else
3202 warning_at (loc, opt,
3203 "%K%qD specified bound between %E and %E "
3204 "exceeds maximum object size %E",
3205 exp, get_callee_fndecl (exp),
3206 range[0], range[1], maxobjsize);
3208 return false;
3211 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3213 if (tree_int_cst_equal (range[0], range[1]))
3214 warning_at (loc, opt,
3215 "%K%qD specified bound %E "
3216 "exceeds destination size %E",
3217 exp, get_callee_fndecl (exp),
3218 range[0], objsize);
3219 else
3220 warning_at (loc, opt,
3221 "%K%qD specified bound between %E and %E "
3222 "exceeds destination size %E",
3223 exp, get_callee_fndecl (exp),
3224 range[0], range[1], objsize);
3225 return false;
3230 if (slen
3231 && slen == src
3232 && size && range[0]
3233 && tree_int_cst_lt (slen, range[0]))
3235 location_t loc = tree_nonartificial_location (exp);
3237 if (tree_int_cst_equal (range[0], range[1]))
3238 warning_at (loc, opt,
3239 (tree_int_cst_equal (range[0], integer_one_node)
3240 ? G_("%K%qD reading %E byte from a region of size %E")
3241 : G_("%K%qD reading %E bytes from a region of size %E")),
3242 exp, get_callee_fndecl (exp), range[0], slen);
3243 else if (tree_int_cst_sign_bit (range[1]))
3245 /* Avoid printing the upper bound if it's invalid. */
3246 warning_at (loc, opt,
3247 "%K%qD reading %E or more bytes from a region "
3248 "of size %E",
3249 exp, get_callee_fndecl (exp), range[0], slen);
3251 else
3252 warning_at (loc, opt,
3253 "%K%qD reading between %E and %E bytes from a region "
3254 "of size %E",
3255 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3256 return false;
3259 return true;
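/* Illustrative example (not in the original source): with
   -Wstringop-overflow enabled, a call such as

     char d[3];
     memcpy (d, s, 5);

   reaches check_sizes with range[0] == range[1] == 5 and OBJSIZE == 3,
   so the code above issues "writing 5 bytes into a region of size 3
   overflows the destination" and returns false.  */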
3262 /* Helper to compute the size of the object referenced by the DEST
3263 expression which must have pointer type, using Object Size type
3264 OSTYPE (only the least significant 2 bits are used). Return
3265 the size of the object if successful or NULL when the size cannot
3266 be determined. */
3268 tree
3269 compute_objsize (tree dest, int ostype)
3271 unsigned HOST_WIDE_INT size;
3273 /* Only the two least significant bits are meaningful. */
3274 ostype &= 3;
3276 if (compute_builtin_object_size (dest, ostype, &size))
3277 return build_int_cst (sizetype, size);
3279 /* Unless computing the largest size (for memcpy and other raw memory
3280 functions), try to determine the size of the object from its type. */
3281 if (!ostype)
3282 return NULL_TREE;
3284 if (TREE_CODE (dest) == SSA_NAME)
3286 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3287 if (!is_gimple_assign (stmt))
3288 return NULL_TREE;
3290 tree_code code = gimple_assign_rhs_code (stmt);
3291 if (code != ADDR_EXPR && code != POINTER_PLUS_EXPR)
3292 return NULL_TREE;
3294 dest = gimple_assign_rhs1 (stmt);
3297 if (TREE_CODE (dest) != ADDR_EXPR)
3298 return NULL_TREE;
3300 tree type = TREE_TYPE (dest);
3301 if (TREE_CODE (type) == POINTER_TYPE)
3302 type = TREE_TYPE (type);
3304 type = TYPE_MAIN_VARIANT (type);
3306 if (TREE_CODE (type) == ARRAY_TYPE
3307 && !array_at_struct_end_p (dest))
3309 /* Return the constant size unless it's zero (that's a zero-length
3310 array likely at the end of a struct). */
3311 tree size = TYPE_SIZE_UNIT (type);
3312 if (size && TREE_CODE (size) == INTEGER_CST
3313 && !integer_zerop (size))
3314 return size;
3317 return NULL_TREE;
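/* Illustrative example (not in the original source): for

     char a[8];
     ... memset (a, 0, n) ...

   compute_objsize on the ADDR_EXPR of a succeeds via
   compute_builtin_object_size and returns an INTEGER_CST of 8; for an
   SSA pointer whose defining statement is neither an ADDR_EXPR nor a
   POINTER_PLUS_EXPR, the fallback above gives up with NULL_TREE.  */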
3320 /* Helper to determine and check the sizes of the source and the destination
3321 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3322 call expression, DEST is the destination argument, SRC is the source
3323 argument or null, and LEN is the number of bytes. Use Object Size type-0
3324 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3325 (no overflow or invalid sizes), false otherwise. */
3327 static bool
3328 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3330 if (!warn_stringop_overflow)
3331 return true;
3333 /* For functions like memset and memcpy that operate on raw memory
3334 try to determine the size of the largest source and destination
3335 object using type-0 Object Size regardless of the object size
3336 type specified by the option. */
3337 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3338 tree dstsize = compute_objsize (dest, 0);
3340 return check_sizes (OPT_Wstringop_overflow_, exp,
3341 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3344 /* Validate memchr arguments without performing any expansion.
3345 Return NULL_RTX. */
3347 static rtx
3348 expand_builtin_memchr (tree exp, rtx)
3350 if (!validate_arglist (exp,
3351 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3352 return NULL_RTX;
3354 tree arg1 = CALL_EXPR_ARG (exp, 0);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3357 /* Diagnose calls where the specified length exceeds the size
3358 of the object. */
3359 if (warn_stringop_overflow)
3361 tree size = compute_objsize (arg1, 0);
3362 check_sizes (OPT_Wstringop_overflow_,
3363 exp, len, /*maxlen=*/NULL_TREE,
3364 size, /*objsize=*/NULL_TREE);
3367 return NULL_RTX;
3370 /* Expand a call EXP to the memcpy builtin.
3371 Return NULL_RTX if we failed; the caller should emit a normal call,
3372 otherwise try to get the result in TARGET, if convenient (and in
3373 mode MODE if that's convenient). */
3375 static rtx
3376 expand_builtin_memcpy (tree exp, rtx target)
3378 if (!validate_arglist (exp,
3379 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3380 return NULL_RTX;
3382 tree dest = CALL_EXPR_ARG (exp, 0);
3383 tree src = CALL_EXPR_ARG (exp, 1);
3384 tree len = CALL_EXPR_ARG (exp, 2);
3386 check_memop_sizes (exp, dest, src, len);
3388 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3389 /*endp=*/ 0);
3392 /* Check a call EXP to the memmove built-in for validity.
3393 Return NULL_RTX on both success and failure. */
3395 static rtx
3396 expand_builtin_memmove (tree exp, rtx)
3398 if (!validate_arglist (exp,
3399 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3400 return NULL_RTX;
3402 tree dest = CALL_EXPR_ARG (exp, 0);
3403 tree src = CALL_EXPR_ARG (exp, 1);
3404 tree len = CALL_EXPR_ARG (exp, 2);
3406 check_memop_sizes (exp, dest, src, len);
3408 return NULL_RTX;
3411 /* Expand an instrumented call EXP to the memcpy builtin.
3412 Return NULL_RTX if we failed; the caller should emit a normal call,
3413 otherwise try to get the result in TARGET, if convenient (and in
3414 mode MODE if that's convenient). */
3416 static rtx
3417 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3419 if (!validate_arglist (exp,
3420 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3421 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3422 INTEGER_TYPE, VOID_TYPE))
3423 return NULL_RTX;
3424 else
3426 tree dest = CALL_EXPR_ARG (exp, 0);
3427 tree src = CALL_EXPR_ARG (exp, 2);
3428 tree len = CALL_EXPR_ARG (exp, 4);
3429 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3430 /*endp=*/ 0);
3432 /* Return src bounds with the result. */
3433 if (res)
3435 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3436 expand_normal (CALL_EXPR_ARG (exp, 1)));
3437 res = chkp_join_splitted_slot (res, bnd);
3439 return res;
3443 /* Expand a call EXP to the mempcpy builtin.
3444 Return NULL_RTX if we failed; the caller should emit a normal call,
3445 otherwise try to get the result in TARGET, if convenient (and in
3446 mode MODE if that's convenient). If ENDP is 0 return the
3447 destination pointer, if ENDP is 1 return the end pointer ala
3448 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3449 stpcpy. */
3451 static rtx
3452 expand_builtin_mempcpy (tree exp, rtx target)
3454 if (!validate_arglist (exp,
3455 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3456 return NULL_RTX;
3458 tree dest = CALL_EXPR_ARG (exp, 0);
3459 tree src = CALL_EXPR_ARG (exp, 1);
3460 tree len = CALL_EXPR_ARG (exp, 2);
3462 /* Avoid expanding mempcpy into memcpy when the call is determined
3463 to overflow the buffer. This also prevents the same overflow
3464 from being diagnosed again when expanding memcpy. */
3465 if (!check_memop_sizes (exp, dest, src, len))
3466 return NULL_RTX;
3468 return expand_builtin_mempcpy_args (dest, src, len,
3469 target, exp, /*endp=*/ 1);
3472 /* Expand an instrumented call EXP to the mempcpy builtin.
3473 Return NULL_RTX if we failed; the caller should emit a normal call,
3474 otherwise try to get the result in TARGET, if convenient (and in
3475 mode MODE if that's convenient). */
3477 static rtx
3478 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3480 if (!validate_arglist (exp,
3481 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3482 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3483 INTEGER_TYPE, VOID_TYPE))
3484 return NULL_RTX;
3485 else
3487 tree dest = CALL_EXPR_ARG (exp, 0);
3488 tree src = CALL_EXPR_ARG (exp, 2);
3489 tree len = CALL_EXPR_ARG (exp, 4);
3490 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3491 exp, 1);
3493 /* Return src bounds with the result. */
3494 if (res)
3496 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3497 expand_normal (CALL_EXPR_ARG (exp, 1)));
3498 res = chkp_join_splitted_slot (res, bnd);
3500 return res;
3504 /* Helper function to do the actual work of expanding the memory copy
3505 family of functions (memcpy, mempcpy, stpcpy). The expansion copies LEN
3506 bytes of memory from SRC to DEST and assigns the result to TARGET if convenient.
3507 If ENDP is 0 return the
3508 destination pointer, if ENDP is 1 return the end pointer ala
3509 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3510 stpcpy. */
3512 static rtx
3513 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3514 rtx target, tree exp, int endp)
3516 const char *src_str;
3517 unsigned int src_align = get_pointer_alignment (src);
3518 unsigned int dest_align = get_pointer_alignment (dest);
3519 rtx dest_mem, src_mem, dest_addr, len_rtx;
3520 HOST_WIDE_INT expected_size = -1;
3521 unsigned int expected_align = 0;
3522 unsigned HOST_WIDE_INT min_size;
3523 unsigned HOST_WIDE_INT max_size;
3524 unsigned HOST_WIDE_INT probable_max_size;
3526 /* If DEST is not a pointer type, call the normal function. */
3527 if (dest_align == 0)
3528 return NULL_RTX;
3530 /* Likewise, if SRC is not a pointer type, don't do this
3531 operation in-line. */
3532 if (src_align == 0)
3533 return NULL_RTX;
3535 if (currently_expanding_gimple_stmt)
3536 stringop_block_profile (currently_expanding_gimple_stmt,
3537 &expected_align, &expected_size);
3539 if (expected_align < dest_align)
3540 expected_align = dest_align;
3541 dest_mem = get_memory_rtx (dest, len);
3542 set_mem_align (dest_mem, dest_align);
3543 len_rtx = expand_normal (len);
3544 determine_block_size (len, len_rtx, &min_size, &max_size,
3545 &probable_max_size);
3546 src_str = c_getstr (src);
3548 /* If SRC is a string constant and block move would be done
3549 by pieces, we can avoid loading the string from memory
3550 and only store the computed constants. */
3551 if (src_str
3552 && CONST_INT_P (len_rtx)
3553 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3554 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3555 CONST_CAST (char *, src_str),
3556 dest_align, false))
3558 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3559 builtin_memcpy_read_str,
3560 CONST_CAST (char *, src_str),
3561 dest_align, false, endp);
3562 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3563 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3564 return dest_mem;
3567 src_mem = get_memory_rtx (src, len);
3568 set_mem_align (src_mem, src_align);
3570 /* Copy word part most expediently. */
3571 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3572 CALL_EXPR_TAILCALL (exp)
3573 && (endp == 0 || target == const0_rtx)
3574 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3575 expected_align, expected_size,
3576 min_size, max_size, probable_max_size);
3578 if (dest_addr == 0)
3580 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3581 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3584 if (endp && target != const0_rtx)
3586 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3587 /* stpcpy returns a pointer to the last byte. */
3588 if (endp == 2)
3589 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3592 return dest_addr;
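/* For illustration (an aside, not from the GCC sources): the string
   constant fast path above means a copy with a small constant length
   need not reference the string literal at all.  */
#if 0 /* usage sketch only */
char d[8];
memcpy (d, "hi", 3);   /* src_str == "hi", len_rtx == 3: store_by_pieces
                          emits immediate stores of 'h', 'i', '\0'
                          (grouped into wider stores where profitable)
                          instead of loading from the literal.  */
#endif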
3595 static rtx
3596 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3597 rtx target, tree orig_exp, int endp)
3599 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3600 endp);
3603 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3604 we failed; the caller should emit a normal call; otherwise try to
3605 get the result in TARGET, if convenient. If ENDP is 0 return the
3606 destination pointer, if ENDP is 1 return the end pointer ala
3607 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3608 stpcpy. */
3610 static rtx
3611 expand_movstr (tree dest, tree src, rtx target, int endp)
3613 struct expand_operand ops[3];
3614 rtx dest_mem;
3615 rtx src_mem;
3617 if (!targetm.have_movstr ())
3618 return NULL_RTX;
3620 dest_mem = get_memory_rtx (dest, NULL);
3621 src_mem = get_memory_rtx (src, NULL);
3622 if (!endp)
3624 target = force_reg (Pmode, XEXP (dest_mem, 0));
3625 dest_mem = replace_equiv_address (dest_mem, target);
3628 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3629 create_fixed_operand (&ops[1], dest_mem);
3630 create_fixed_operand (&ops[2], src_mem);
3631 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3632 return NULL_RTX;
3634 if (endp && target != const0_rtx)
3636 target = ops[0].value;
3637 /* movstr is supposed to set end to the address of the NUL
3638 terminator. If the caller requested a mempcpy-like return value,
3639 adjust it. */
3640 if (endp == 1)
3642 rtx tem = plus_constant (GET_MODE (target),
3643 gen_lowpart (GET_MODE (target), target), 1);
3644 emit_move_insn (target, force_operand (tem, NULL_RTX));
3647 return target;
3650 /* Do some very basic size validation of a call to the strcat builtin
3651 given by EXP. Return NULL_RTX to have the built-in expand to a call
3652 to the library function. */
3654 static rtx
3655 expand_builtin_strcat (tree exp, rtx)
3657 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3658 || !warn_stringop_overflow)
3659 return NULL_RTX;
3661 tree dest = CALL_EXPR_ARG (exp, 0);
3662 tree src = CALL_EXPR_ARG (exp, 1);
3664 /* There is no way here to determine the length of the string in
3665 the destination to which the SRC string is being appended, so
3666 just diagnose cases where the source string is longer than
3667 the destination object. */
3669 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3671 check_sizes (OPT_Wstringop_overflow_,
3672 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3674 return NULL_RTX;
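/* For illustration (an aside, not from the GCC sources): because the
   destination's current contents are unknown here, only a source that
   cannot fit even into an empty destination is diagnosed.  */
#if 0 /* usage sketch only; diagnosed under -Wstringop-overflow */
char d[4];
strcat (d, "too long");   /* 9 bytes including the NUL > sizeof d */
#endif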
3677 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3678 NULL_RTX if we failed; the caller should emit a normal call; otherwise
3679 try to get the result in TARGET, if convenient (and in mode MODE if that's
3680 convenient). */
3682 static rtx
3683 expand_builtin_strcpy (tree exp, rtx target)
3685 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3686 return NULL_RTX;
3688 tree dest = CALL_EXPR_ARG (exp, 0);
3689 tree src = CALL_EXPR_ARG (exp, 1);
3691 if (warn_stringop_overflow)
3693 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3694 check_sizes (OPT_Wstringop_overflow_,
3695 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3698 return expand_builtin_strcpy_args (dest, src, target);
3701 /* Helper function to do the actual work for expand_builtin_strcpy. The
3702 arguments to the builtin_strcpy call DEST and SRC are broken out
3703 so that this can also be called without constructing an actual CALL_EXPR.
3704 The other arguments and return value are the same as for
3705 expand_builtin_strcpy. */
3707 static rtx
3708 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3710 return expand_movstr (dest, src, target, /*endp=*/0);
3713 /* Expand a call EXP to the stpcpy builtin.
3714 Return NULL_RTX if we failed; the caller should emit a normal call;
3715 otherwise try to get the result in TARGET, if convenient (and in
3716 mode MODE if that's convenient). */
3718 static rtx
3719 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3721 tree dst, src;
3722 location_t loc = EXPR_LOCATION (exp);
3724 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3725 return NULL_RTX;
3727 dst = CALL_EXPR_ARG (exp, 0);
3728 src = CALL_EXPR_ARG (exp, 1);
3730 if (warn_stringop_overflow)
3732 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3733 check_sizes (OPT_Wstringop_overflow_,
3734 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3737 /* If return value is ignored, transform stpcpy into strcpy. */
3738 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3740 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3741 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3742 return expand_expr (result, target, mode, EXPAND_NORMAL);
3744 else
3746 tree len, lenp1;
3747 rtx ret;
3749 /* Ensure we get an actual string whose length can be evaluated at
3750 compile-time, not an expression containing a string. This is
3751 because the latter will potentially produce pessimized code
3752 when used to produce the return value. */
3753 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3754 return expand_movstr (dst, src, target, /*endp=*/2);
3756 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3757 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3758 target, exp, /*endp=*/2);
3760 if (ret)
3761 return ret;
3763 if (TREE_CODE (len) == INTEGER_CST)
3765 rtx len_rtx = expand_normal (len);
3767 if (CONST_INT_P (len_rtx))
3769 ret = expand_builtin_strcpy_args (dst, src, target);
3771 if (ret)
3773 if (! target)
3775 if (mode != VOIDmode)
3776 target = gen_reg_rtx (mode);
3777 else
3778 target = gen_reg_rtx (GET_MODE (ret));
3780 if (GET_MODE (target) != GET_MODE (ret))
3781 ret = gen_lowpart (GET_MODE (target), ret);
3783 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3784 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3785 gcc_assert (ret);
3787 return target;
3792 return expand_movstr (dst, src, target, /*endp=*/2);
3796 /* Check a call EXP to the stpncpy built-in for validity.
3797 Return NULL_RTX on both success and failure. */
3799 static rtx
3800 expand_builtin_stpncpy (tree exp, rtx)
3802 if (!validate_arglist (exp,
3803 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3804 || !warn_stringop_overflow)
3805 return NULL_RTX;
3807 /* The source and destination of the call. */
3808 tree dest = CALL_EXPR_ARG (exp, 0);
3809 tree src = CALL_EXPR_ARG (exp, 1);
3811 /* The exact number of bytes to write (not the maximum). */
3812 tree len = CALL_EXPR_ARG (exp, 2);
3814 /* The size of the destination object. */
3815 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3817 check_sizes (OPT_Wstringop_overflow_,
3818 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3820 return NULL_RTX;
3823 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3824 bytes from constant string DATA + OFFSET and return it as target
3825 constant. */
3827 static rtx
3828 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3829 scalar_int_mode mode)
3831 const char *str = (const char *) data;
3833 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3834 return const0_rtx;
3836 return c_readstr (str + offset, mode);
3839 /* Helper to check the sizes of sequences and the destination of calls
3840 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3841 success (no overflow or invalid sizes), false otherwise. */
3843 static bool
3844 check_strncat_sizes (tree exp, tree objsize)
3846 tree dest = CALL_EXPR_ARG (exp, 0);
3847 tree src = CALL_EXPR_ARG (exp, 1);
3848 tree maxlen = CALL_EXPR_ARG (exp, 2);
3850 /* Try to determine the range of lengths that the source expression
3851 refers to. */
3852 tree lenrange[2];
3853 get_range_strlen (src, lenrange);
3855 /* Try to verify that the destination is big enough for the shortest
3856 string. */
3858 if (!objsize && warn_stringop_overflow)
3860 /* If it hasn't been provided by __strncat_chk, try to determine
3861 the size of the destination object into which the source is
3862 being copied. */
3863 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3866 /* Add one for the terminating nul. */
3867 tree srclen = (lenrange[0]
3868 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3869 size_one_node)
3870 : NULL_TREE);
3872 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3873 nul, so the specified upper bound should never be equal to (or greater
3874 than) the size of the destination. */
3875 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3876 && tree_int_cst_equal (objsize, maxlen))
3878 location_t loc = tree_nonartificial_location (exp);
3879 loc = expansion_point_location_if_in_system_header (loc);
3881 warning_at (loc, OPT_Wstringop_overflow_,
3882 "%K%qD specified bound %E equals destination size",
3883 exp, get_callee_fndecl (exp), maxlen);
3885 return false;
3888 if (!srclen
3889 || (maxlen && tree_fits_uhwi_p (maxlen)
3890 && tree_fits_uhwi_p (srclen)
3891 && tree_int_cst_lt (maxlen, srclen)))
3892 srclen = maxlen;
3894 /* The number of bytes to write is LEN but check_sizes will also
3895 check SRCLEN if LEN's value isn't known. */
3896 return check_sizes (OPT_Wstringop_overflow_,
3897 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3900 /* Similar to expand_builtin_strcat, do some very basic size validation
3901 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3902 the built-in expand to a call to the library function. */
3904 static rtx
3905 expand_builtin_strncat (tree exp, rtx)
3907 if (!validate_arglist (exp,
3908 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3909 || !warn_stringop_overflow)
3910 return NULL_RTX;
3912 tree dest = CALL_EXPR_ARG (exp, 0);
3913 tree src = CALL_EXPR_ARG (exp, 1);
3914 /* The upper bound on the number of bytes to write. */
3915 tree maxlen = CALL_EXPR_ARG (exp, 2);
3916 /* The length of the source sequence. */
3917 tree slen = c_strlen (src, 1);
3919 /* Try to determine the range of lengths that the source expression
3920 refers to. */
3921 tree lenrange[2];
3922 if (slen)
3923 lenrange[0] = lenrange[1] = slen;
3924 else
3925 get_range_strlen (src, lenrange);
3927 /* Try to verify that the destination is big enough for the shortest
3928 string. First try to determine the size of the destination object
3929 into which the source is being copied. */
3930 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3932 /* Add one for the terminating nul. */
3933 tree srclen = (lenrange[0]
3934 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3935 size_one_node)
3936 : NULL_TREE);
3938 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3939 nul, so the specified upper bound should never be equal to (or greater
3940 than) the size of the destination. */
3941 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3942 && tree_int_cst_equal (destsize, maxlen))
3944 location_t loc = tree_nonartificial_location (exp);
3945 loc = expansion_point_location_if_in_system_header (loc);
3947 warning_at (loc, OPT_Wstringop_overflow_,
3948 "%K%qD specified bound %E equals destination size",
3949 exp, get_callee_fndecl (exp), maxlen);
3951 return NULL_RTX;
3954 if (!srclen
3955 || (maxlen && tree_fits_uhwi_p (maxlen)
3956 && tree_fits_uhwi_p (srclen)
3957 && tree_int_cst_lt (maxlen, srclen)))
3958 srclen = maxlen;
3960 /* The number of bytes to write is LEN but check_sizes will also
3961 check SRCLEN if LEN's value isn't known. */
3962 check_sizes (OPT_Wstringop_overflow_,
3963 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3965 return NULL_RTX;
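/* For illustration (an aside, not from the GCC sources): the warning
   above catches the common misuse of passing the full destination size
   as the strncat bound.  */
#if 0 /* usage sketch only */
char d[8] = "";
strncat (d, "abcdefgh", sizeof d);   /* bound equals sizeof d: warned.
                                        A safe bound is
                                        sizeof d - strlen (d) - 1.  */
#endif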
3968 /* Helper to check the sizes of sequences and the destination of calls
3969 to __builtin_strncpy (DST, SRC, CNT) and __builtin___strncpy_chk.
3970 Returns true on success (no overflow warning), false otherwise. */
3972 static bool
3973 check_strncpy_sizes (tree exp, tree dst, tree src, tree cnt)
3975 tree dstsize = compute_objsize (dst, warn_stringop_overflow - 1);
3977 if (!check_sizes (OPT_Wstringop_overflow_,
3978 exp, cnt, /*maxlen=*/NULL_TREE, src, dstsize))
3979 return false;
3981 return true;
3984 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3985 NULL_RTX if we failed; the caller should emit a normal call. */
3987 static rtx
3988 expand_builtin_strncpy (tree exp, rtx target)
3990 location_t loc = EXPR_LOCATION (exp);
3992 if (validate_arglist (exp,
3993 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3995 tree dest = CALL_EXPR_ARG (exp, 0);
3996 tree src = CALL_EXPR_ARG (exp, 1);
3997 /* The number of bytes to write (not the maximum). */
3998 tree len = CALL_EXPR_ARG (exp, 2);
3999 /* The length of the source sequence. */
4000 tree slen = c_strlen (src, 1);
4002 check_strncpy_sizes (exp, dest, src, len);
4004 /* We must be passed a constant len and src parameter. */
4005 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4006 return NULL_RTX;
4008 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4010 /* We're required to pad with trailing zeros if the requested
4011 len is greater than strlen(s2)+1. In that case try to
4012 use store_by_pieces; if it fails, punt.
4013 if (tree_int_cst_lt (slen, len))
4015 unsigned int dest_align = get_pointer_alignment (dest);
4016 const char *p = c_getstr (src);
4017 rtx dest_mem;
4019 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4020 || !can_store_by_pieces (tree_to_uhwi (len),
4021 builtin_strncpy_read_str,
4022 CONST_CAST (char *, p),
4023 dest_align, false))
4024 return NULL_RTX;
4026 dest_mem = get_memory_rtx (dest, len);
4027 store_by_pieces (dest_mem, tree_to_uhwi (len),
4028 builtin_strncpy_read_str,
4029 CONST_CAST (char *, p), dest_align, false, 0);
4030 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4031 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4032 return dest_mem;
4035 return NULL_RTX;
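/* For illustration (an aside, not from the GCC sources): the padding
   case handled via store_by_pieces above.  */
#if 0 /* usage sketch only */
char d[5];
strncpy (d, "ab", 5);   /* strlen ("ab") + 1 < 5, so the expansion must
                           write 'a', 'b' and then pad with three NULs;
                           with a constant source and length this becomes
                           a handful of direct stores.  */
#endif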
4038 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4039 bytes from constant string DATA + OFFSET and return it as target
4040 constant. */
4042 static rtx
4043 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4044 scalar_int_mode mode)
4046 const char *c = (const char *) data;
4047 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4049 memset (p, *c, GET_MODE_SIZE (mode));
4051 return c_readstr (p, mode);
4054 /* Callback routine for store_by_pieces. Return the RTL of a register
4055 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4056 char value given in the RTL register DATA. For example, if mode is
4057 4 bytes wide, return the RTL for 0x01010101*data. */
4059 static rtx
4060 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4061 scalar_int_mode mode)
4063 rtx target, coeff;
4064 size_t size;
4065 char *p;
4067 size = GET_MODE_SIZE (mode);
4068 if (size == 1)
4069 return (rtx) data;
4071 p = XALLOCAVEC (char, size);
4072 memset (p, 1, size);
4073 coeff = c_readstr (p, mode);
4075 target = convert_to_mode (mode, (rtx) data, 1);
4076 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4077 return force_reg (mode, target);
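/* For illustration (an aside, not from the GCC sources): the multiply
   above replicates the fill byte across the word.  For a 4-byte mode
   the coefficient read back by c_readstr from "\1\1\1\1" is 0x01010101,
   so a fill value of 0xAB yields

     0x000000AB * 0x01010101 == 0xABABABAB

   and a single word store writes four copies of the byte.  */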
4080 /* Expand expression EXP, which is a call to the memset builtin. Return
4081 NULL_RTX if we failed; the caller should emit a normal call; otherwise
4082 try to get the result in TARGET, if convenient (and in mode MODE if that's
4083 convenient). */
4085 static rtx
4086 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4088 if (!validate_arglist (exp,
4089 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4090 return NULL_RTX;
4092 tree dest = CALL_EXPR_ARG (exp, 0);
4093 tree val = CALL_EXPR_ARG (exp, 1);
4094 tree len = CALL_EXPR_ARG (exp, 2);
4096 check_memop_sizes (exp, dest, NULL_TREE, len);
4098 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4101 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4102 Return NULL_RTX if we failed; the caller should emit a normal call; otherwise
4103 try to get the result in TARGET, if convenient (and in mode MODE if that's
4104 convenient). */
4106 static rtx
4107 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4109 if (!validate_arglist (exp,
4110 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4111 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4112 return NULL_RTX;
4113 else
4115 tree dest = CALL_EXPR_ARG (exp, 0);
4116 tree val = CALL_EXPR_ARG (exp, 2);
4117 tree len = CALL_EXPR_ARG (exp, 3);
4118 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4120 /* Return src bounds with the result. */
4121 if (res)
4123 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4124 expand_normal (CALL_EXPR_ARG (exp, 1)));
4125 res = chkp_join_splitted_slot (res, bnd);
4127 return res;
4131 /* Helper function to do the actual work for expand_builtin_memset. The
4132 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4133 so that this can also be called without constructing an actual CALL_EXPR.
4134 The other arguments and return value are the same as for
4135 expand_builtin_memset. */
4137 static rtx
4138 expand_builtin_memset_args (tree dest, tree val, tree len,
4139 rtx target, machine_mode mode, tree orig_exp)
4141 tree fndecl, fn;
4142 enum built_in_function fcode;
4143 machine_mode val_mode;
4144 char c;
4145 unsigned int dest_align;
4146 rtx dest_mem, dest_addr, len_rtx;
4147 HOST_WIDE_INT expected_size = -1;
4148 unsigned int expected_align = 0;
4149 unsigned HOST_WIDE_INT min_size;
4150 unsigned HOST_WIDE_INT max_size;
4151 unsigned HOST_WIDE_INT probable_max_size;
4153 dest_align = get_pointer_alignment (dest);
4155 /* If DEST is not a pointer type, don't do this operation in-line. */
4156 if (dest_align == 0)
4157 return NULL_RTX;
4159 if (currently_expanding_gimple_stmt)
4160 stringop_block_profile (currently_expanding_gimple_stmt,
4161 &expected_align, &expected_size);
4163 if (expected_align < dest_align)
4164 expected_align = dest_align;
4166 /* If the LEN parameter is zero, return DEST. */
4167 if (integer_zerop (len))
4169 /* Evaluate and ignore VAL in case it has side-effects. */
4170 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4171 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4174 /* Stabilize the arguments in case we fail. */
4175 dest = builtin_save_expr (dest);
4176 val = builtin_save_expr (val);
4177 len = builtin_save_expr (len);
4179 len_rtx = expand_normal (len);
4180 determine_block_size (len, len_rtx, &min_size, &max_size,
4181 &probable_max_size);
4182 dest_mem = get_memory_rtx (dest, len);
4183 val_mode = TYPE_MODE (unsigned_char_type_node);
4185 if (TREE_CODE (val) != INTEGER_CST)
4187 rtx val_rtx;
4189 val_rtx = expand_normal (val);
4190 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4192 /* Assume that we can memset by pieces if we can store
4193 the coefficients by pieces (in the required modes).
4194 We can't pass builtin_memset_gen_str as that emits RTL. */
4195 c = 1;
4196 if (tree_fits_uhwi_p (len)
4197 && can_store_by_pieces (tree_to_uhwi (len),
4198 builtin_memset_read_str, &c, dest_align,
4199 true))
4201 val_rtx = force_reg (val_mode, val_rtx);
4202 store_by_pieces (dest_mem, tree_to_uhwi (len),
4203 builtin_memset_gen_str, val_rtx, dest_align,
4204 true, 0);
4206 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4207 dest_align, expected_align,
4208 expected_size, min_size, max_size,
4209 probable_max_size))
4210 goto do_libcall;
4212 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4213 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4214 return dest_mem;
4217 if (target_char_cast (val, &c))
4218 goto do_libcall;
4220 if (c)
4222 if (tree_fits_uhwi_p (len)
4223 && can_store_by_pieces (tree_to_uhwi (len),
4224 builtin_memset_read_str, &c, dest_align,
4225 true))
4226 store_by_pieces (dest_mem, tree_to_uhwi (len),
4227 builtin_memset_read_str, &c, dest_align, true, 0);
4228 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4229 gen_int_mode (c, val_mode),
4230 dest_align, expected_align,
4231 expected_size, min_size, max_size,
4232 probable_max_size))
4233 goto do_libcall;
4235 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4236 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4237 return dest_mem;
4240 set_mem_align (dest_mem, dest_align);
4241 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4242 CALL_EXPR_TAILCALL (orig_exp)
4243 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4244 expected_align, expected_size,
4245 min_size, max_size,
4246 probable_max_size);
4248 if (dest_addr == 0)
4250 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4251 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4254 return dest_addr;
4256 do_libcall:
4257 fndecl = get_callee_fndecl (orig_exp);
4258 fcode = DECL_FUNCTION_CODE (fndecl);
4259 if (fcode == BUILT_IN_MEMSET
4260 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4261 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4262 dest, val, len);
4263 else if (fcode == BUILT_IN_BZERO)
4264 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4265 dest, len);
4266 else
4267 gcc_unreachable ();
4268 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4269 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4270 return expand_call (fn, target, target == const0_rtx);
4273 /* Expand expression EXP, which is a call to the bzero builtin. Return
4274 NULL_RTX if we failed; the caller should emit a normal call. */
4276 static rtx
4277 expand_builtin_bzero (tree exp)
4279 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4280 return NULL_RTX;
4282 tree dest = CALL_EXPR_ARG (exp, 0);
4283 tree size = CALL_EXPR_ARG (exp, 1);
4285 check_memop_sizes (exp, dest, NULL_TREE, size);
4287 /* New argument list transforming bzero(ptr x, int y) to
4288 memset(ptr x, int 0, size_t y). This is done this way
4289 so that if it isn't expanded inline, we fall back to
4290 calling bzero instead of memset. */
4292 location_t loc = EXPR_LOCATION (exp);
4294 return expand_builtin_memset_args (dest, integer_zero_node,
4295 fold_convert_loc (loc,
4296 size_type_node, size),
4297 const0_rtx, VOIDmode, exp);
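/* For illustration (an aside, not from the GCC sources): the argument
   rewrite above, seen at the source level.  P and N are hypothetical
   operands.  */
#if 0 /* usage sketch only */
void *p; unsigned int n;
bzero (p, n);               /* is expanded as if it were ...  */
memset (p, 0, (size_t) n);  /* ... but a failed inline expansion still
                               calls bzero, since ORIG_EXP keeps the
                               original callee.  */
#endif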
4300 /* Try to expand cmpstr operation ICODE with the given operands.
4301 Return the result rtx on success, otherwise return null. */
4303 static rtx
4304 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4305 HOST_WIDE_INT align)
4307 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4309 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4310 target = NULL_RTX;
4312 struct expand_operand ops[4];
4313 create_output_operand (&ops[0], target, insn_mode);
4314 create_fixed_operand (&ops[1], arg1_rtx);
4315 create_fixed_operand (&ops[2], arg2_rtx);
4316 create_integer_operand (&ops[3], align);
4317 if (maybe_expand_insn (icode, 4, ops))
4318 return ops[0].value;
4319 return NULL_RTX;
4322 /* Expand expression EXP, which is a call to the memcmp built-in function.
4323 Return NULL_RTX if we failed and the caller should emit a normal call,
4324 otherwise try to get the result in TARGET, if convenient.
4325 RESULT_EQ is true if we can relax the returned value to be either zero
4326 or nonzero, without caring about the sign. */
4328 static rtx
4329 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4331 if (!validate_arglist (exp,
4332 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4333 return NULL_RTX;
4335 tree arg1 = CALL_EXPR_ARG (exp, 0);
4336 tree arg2 = CALL_EXPR_ARG (exp, 1);
4337 tree len = CALL_EXPR_ARG (exp, 2);
4339 /* Diagnose calls where the specified length exceeds the size of either
4340 object. */
4341 if (warn_stringop_overflow)
4343 tree size = compute_objsize (arg1, 0);
4344 if (check_sizes (OPT_Wstringop_overflow_,
4345 exp, len, /*maxlen=*/NULL_TREE,
4346 size, /*objsize=*/NULL_TREE))
4348 size = compute_objsize (arg2, 0);
4349 check_sizes (OPT_Wstringop_overflow_,
4350 exp, len, /*maxlen=*/NULL_TREE,
4351 size, /*objsize=*/NULL_TREE);
4355 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4356 location_t loc = EXPR_LOCATION (exp);
4358 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4359 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4361 /* If we don't have POINTER_TYPE, call the function. */
4362 if (arg1_align == 0 || arg2_align == 0)
4363 return NULL_RTX;
4365 rtx arg1_rtx = get_memory_rtx (arg1, len);
4366 rtx arg2_rtx = get_memory_rtx (arg2, len);
4367 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4369 /* Set MEM_SIZE as appropriate. */
4370 if (CONST_INT_P (len_rtx))
4372 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4373 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4376 by_pieces_constfn constfn = NULL;
4378 const char *src_str = c_getstr (arg2);
4379 if (result_eq && src_str == NULL)
4381 src_str = c_getstr (arg1);
4382 if (src_str != NULL)
4383 std::swap (arg1_rtx, arg2_rtx);
4386 /* If SRC is a string constant and block move would be done
4387 by pieces, we can avoid loading the string from memory
4388 and only store the computed constants. */
4389 if (src_str
4390 && CONST_INT_P (len_rtx)
4391 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4392 constfn = builtin_memcpy_read_str;
4394 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4395 TREE_TYPE (len), target,
4396 result_eq, constfn,
4397 CONST_CAST (char *, src_str));
4399 if (result)
4401 /* Return the value in the proper mode for this function. */
4402 if (GET_MODE (result) == mode)
4403 return result;
4405 if (target != 0)
4407 convert_move (target, result, 0);
4408 return target;
4411 return convert_to_mode (mode, result, 0);
4414 return NULL_RTX;
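/* For illustration (an aside, not from the GCC sources): RESULT_EQ is
   true for callers that only test equality, which is what lets
   emit_block_cmp_hints return an arbitrary nonzero value on
   inequality.  */
#if 0 /* usage sketch only */
static int
bufs_equal (const void *a, const void *b)
{
  return memcmp (a, b, 16) == 0;   /* sign of the result is irrelevant */
}
#endif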
4417 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4418 if we failed; the caller should emit a normal call; otherwise try to get
4419 the result in TARGET, if convenient. */
4421 static rtx
4422 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4424 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4425 return NULL_RTX;
4427 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4428 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4429 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4431 rtx arg1_rtx, arg2_rtx;
4432 tree fndecl, fn;
4433 tree arg1 = CALL_EXPR_ARG (exp, 0);
4434 tree arg2 = CALL_EXPR_ARG (exp, 1);
4435 rtx result = NULL_RTX;
4437 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4438 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4440 /* If we don't have POINTER_TYPE, call the function. */
4441 if (arg1_align == 0 || arg2_align == 0)
4442 return NULL_RTX;
4444 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4445 arg1 = builtin_save_expr (arg1);
4446 arg2 = builtin_save_expr (arg2);
4448 arg1_rtx = get_memory_rtx (arg1, NULL);
4449 arg2_rtx = get_memory_rtx (arg2, NULL);
4451 /* Try to call cmpstrsi. */
4452 if (cmpstr_icode != CODE_FOR_nothing)
4453 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4454 MIN (arg1_align, arg2_align));
4456 /* Try to determine at least one length and call cmpstrnsi. */
4457 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4459 tree len;
4460 rtx arg3_rtx;
4462 tree len1 = c_strlen (arg1, 1);
4463 tree len2 = c_strlen (arg2, 1);
4465 if (len1)
4466 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4467 if (len2)
4468 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4470 /* If we don't have a constant length for the first, use the length
4471 of the second, if we know it. We don't require a constant for
4472 this case; some cost analysis could be done if both are available
4473 but neither is constant. For now, assume they're equally cheap,
4474 unless one has side effects. If both strings have constant lengths,
4475 use the smaller. */
4477 if (!len1)
4478 len = len2;
4479 else if (!len2)
4480 len = len1;
4481 else if (TREE_SIDE_EFFECTS (len1))
4482 len = len2;
4483 else if (TREE_SIDE_EFFECTS (len2))
4484 len = len1;
4485 else if (TREE_CODE (len1) != INTEGER_CST)
4486 len = len2;
4487 else if (TREE_CODE (len2) != INTEGER_CST)
4488 len = len1;
4489 else if (tree_int_cst_lt (len1, len2))
4490 len = len1;
4491 else
4492 len = len2;
4494 /* If both arguments have side effects, we cannot optimize. */
4495 if (len && !TREE_SIDE_EFFECTS (len))
4497 arg3_rtx = expand_normal (len);
4498 result = expand_cmpstrn_or_cmpmem
4499 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4500 arg3_rtx, MIN (arg1_align, arg2_align));
4504 if (result)
4506 /* Return the value in the proper mode for this function. */
4507 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4508 if (GET_MODE (result) == mode)
4509 return result;
4510 if (target == 0)
4511 return convert_to_mode (mode, result, 0);
4512 convert_move (target, result, 0);
4513 return target;
4516 /* Expand the library call ourselves using a stabilized argument
4517 list to avoid re-evaluating the function's arguments twice. */
4518 fndecl = get_callee_fndecl (exp);
4519 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4520 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4521 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4522 return expand_call (fn, target, target == const0_rtx);
4524 return NULL_RTX;
4527 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4528 NULL_RTX if we failed; the caller should emit a normal call; otherwise try to get
4529 the result in TARGET, if convenient. */
4531 static rtx
4532 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4533 ATTRIBUTE_UNUSED machine_mode mode)
4535 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4537 if (!validate_arglist (exp,
4538 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4539 return NULL_RTX;
4541 /* If c_strlen can determine an expression for one of the string
4542 lengths, and it doesn't have side effects, then emit cmpstrnsi
4543 using length MIN(strlen(string)+1, arg3). */
4544 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4545 if (cmpstrn_icode != CODE_FOR_nothing)
4547 tree len, len1, len2, len3;
4548 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4549 rtx result;
4550 tree fndecl, fn;
4551 tree arg1 = CALL_EXPR_ARG (exp, 0);
4552 tree arg2 = CALL_EXPR_ARG (exp, 1);
4553 tree arg3 = CALL_EXPR_ARG (exp, 2);
4555 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4556 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4558 len1 = c_strlen (arg1, 1);
4559 len2 = c_strlen (arg2, 1);
4561 if (len1)
4562 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4563 if (len2)
4564 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4566 len3 = fold_convert_loc (loc, sizetype, arg3);
4568 /* If we don't have a constant length for the first, use the length
4569 of the second, if we know it. If neither string is constant length,
4570 use the given length argument. We don't require a constant for
4571 this case; some cost analysis could be done if both are available
4572 but neither is constant. For now, assume they're equally cheap,
4573 unless one has side effects. If both strings have constant lengths,
4574 use the smaller. */
4576 if (!len1 && !len2)
4577 len = len3;
4578 else if (!len1)
4579 len = len2;
4580 else if (!len2)
4581 len = len1;
4582 else if (TREE_SIDE_EFFECTS (len1))
4583 len = len2;
4584 else if (TREE_SIDE_EFFECTS (len2))
4585 len = len1;
4586 else if (TREE_CODE (len1) != INTEGER_CST)
4587 len = len2;
4588 else if (TREE_CODE (len2) != INTEGER_CST)
4589 len = len1;
4590 else if (tree_int_cst_lt (len1, len2))
4591 len = len1;
4592 else
4593 len = len2;
4595 /* If we are not using the given length, we must incorporate it here.
4596 The actual new length parameter will be MIN(len,arg3) in this case. */
4597 if (len != len3)
4598 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4599 arg1_rtx = get_memory_rtx (arg1, len);
4600 arg2_rtx = get_memory_rtx (arg2, len);
4601 arg3_rtx = expand_normal (len);
4602 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4603 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4604 MIN (arg1_align, arg2_align));
4605 if (result)
4607 /* Return the value in the proper mode for this function. */
4608 mode = TYPE_MODE (TREE_TYPE (exp));
4609 if (GET_MODE (result) == mode)
4610 return result;
4611 if (target == 0)
4612 return convert_to_mode (mode, result, 0);
4613 convert_move (target, result, 0);
4614 return target;
4617 /* Expand the library call ourselves using a stabilized argument
4618 list to avoid re-evaluating the function's arguments twice. */
4619 fndecl = get_callee_fndecl (exp);
4620 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4621 arg1, arg2, len);
4622 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4623 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4624 return expand_call (fn, target, target == const0_rtx);
4626 return NULL_RTX;
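/* For illustration (an aside, not from the GCC sources): the MIN
   adjustment above caps the comparison length when one string has a
   known length.  For

     strncmp (x, "abc", 100)

   len2 == strlen ("abc") + 1 == 4, so MIN (4, 100) == 4 bytes are
   compared: the comparison cannot meaningfully proceed past the
   constant string's NUL.  */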
4629 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4630 if that's convenient. */
4632 rtx
4633 expand_builtin_saveregs (void)
4635 rtx val;
4636 rtx_insn *seq;
4638 /* Don't do __builtin_saveregs more than once in a function.
4639 Save the result of the first call and reuse it. */
4640 if (saveregs_value != 0)
4641 return saveregs_value;
4643 /* When this function is called, it means that registers must be
4644 saved on entry to this function. So we migrate the call to the
4645 first insn of this function. */
4647 start_sequence ();
4649 /* Do whatever the machine needs done in this case. */
4650 val = targetm.calls.expand_builtin_saveregs ();
4652 seq = get_insns ();
4653 end_sequence ();
4655 saveregs_value = val;
4657 /* Put the insns after the NOTE that starts the function. If this
4658 is inside a start_sequence, make the outer-level insn chain current, so
4659 the code is placed at the start of the function. */
4660 push_topmost_sequence ();
4661 emit_insn_after (seq, entry_of_function ());
4662 pop_topmost_sequence ();
4664 return val;
4667 /* Expand a call to __builtin_next_arg. */
4669 static rtx
4670 expand_builtin_next_arg (void)
4672 /* Checking arguments is already done in fold_builtin_next_arg
4673 that must be called before this function. */
4674 return expand_binop (ptr_mode, add_optab,
4675 crtl->args.internal_arg_pointer,
4676 crtl->args.arg_offset_rtx,
4677 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4680 /* Make it easier for the backends by protecting the valist argument
4681 from multiple evaluations. */
4683 static tree
4684 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4686 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4688 /* The current way of determining the type of valist is completely
4689 bogus. We should have the information on the va builtin instead. */
4690 if (!vatype)
4691 vatype = targetm.fn_abi_va_list (cfun->decl);
4693 if (TREE_CODE (vatype) == ARRAY_TYPE)
4695 if (TREE_SIDE_EFFECTS (valist))
4696 valist = save_expr (valist);
4698 /* For this case, the backends will be expecting a pointer to
4699 vatype, but it's possible we've actually been given an array
4700 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4701 So fix it. */
4702 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4704 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4705 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4708 else
4710 tree pt = build_pointer_type (vatype);
4712 if (! needs_lvalue)
4714 if (! TREE_SIDE_EFFECTS (valist))
4715 return valist;
4717 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4718 TREE_SIDE_EFFECTS (valist) = 1;
4721 if (TREE_SIDE_EFFECTS (valist))
4722 valist = save_expr (valist);
4723 valist = fold_build2_loc (loc, MEM_REF,
4724 vatype, valist, build_int_cst (pt, 0));
4727 return valist;
4730 /* The "standard" definition of va_list is void*. */
4732 tree
4733 std_build_builtin_va_list (void)
4735 return ptr_type_node;
4738 /* The "standard" abi va_list is va_list_type_node. */
4740 tree
4741 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4743 return va_list_type_node;
4746 /* The "standard" type of va_list is va_list_type_node. */
4748 tree
4749 std_canonical_va_list_type (tree type)
4751 tree wtype, htype;
4753 wtype = va_list_type_node;
4754 htype = type;
4756 if (TREE_CODE (wtype) == ARRAY_TYPE)
4758 /* If va_list is an array type, the argument may have decayed
4759 to a pointer type, e.g. by being passed to another function.
4760 In that case, unwrap both types so that we can compare the
4761 underlying records. */
4762 if (TREE_CODE (htype) == ARRAY_TYPE
4763 || POINTER_TYPE_P (htype))
4765 wtype = TREE_TYPE (wtype);
4766 htype = TREE_TYPE (htype);
4769 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4770 return va_list_type_node;
4772 return NULL_TREE;
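/* For illustration (an aside, not from the GCC sources): the unwrapping
   above matters on targets such as x86-64, where va_list is an array of
   one structure and so decays to a pointer when passed around.  */
#if 0 /* usage sketch only */
void logv (const char *fmt, va_list ap);  /* AP arrives as a pointer to
                                             the underlying record, not
                                             as the array type itself. */
#endif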
4775 /* The "standard" implementation of va_start: just assign `nextarg' to
4776 the variable. */
4778 void
4779 std_expand_builtin_va_start (tree valist, rtx nextarg)
4781 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4782 convert_move (va_r, nextarg, 0);
4784 /* We do not have any valid bounds for the pointer, so
4785 just store zero bounds for it. */
4786 if (chkp_function_instrumented_p (current_function_decl))
4787 chkp_expand_bounds_reset_for_mem (valist,
4788 make_tree (TREE_TYPE (valist),
4789 nextarg));
4792 /* Expand EXP, a call to __builtin_va_start. */
4794 static rtx
4795 expand_builtin_va_start (tree exp)
4797 rtx nextarg;
4798 tree valist;
4799 location_t loc = EXPR_LOCATION (exp);
4801 if (call_expr_nargs (exp) < 2)
4803 error_at (loc, "too few arguments to function %<va_start%>");
4804 return const0_rtx;
4807 if (fold_builtin_next_arg (exp, true))
4808 return const0_rtx;
4810 nextarg = expand_builtin_next_arg ();
4811 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4813 if (targetm.expand_builtin_va_start)
4814 targetm.expand_builtin_va_start (valist, nextarg);
4815 else
4816 std_expand_builtin_va_start (valist, nextarg);
4818 return const0_rtx;
4821 /* Expand EXP, a call to __builtin_va_end. */
4823 static rtx
4824 expand_builtin_va_end (tree exp)
4826 tree valist = CALL_EXPR_ARG (exp, 0);
4828 /* Evaluate for side effects, if needed. I hate macros that don't
4829 do that. */
4830 if (TREE_SIDE_EFFECTS (valist))
4831 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4833 return const0_rtx;
4836 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4837 builtin rather than just as an assignment in stdarg.h because of the
4838 nastiness of array-type va_list types. */
4840 static rtx
4841 expand_builtin_va_copy (tree exp)
4843 tree dst, src, t;
4844 location_t loc = EXPR_LOCATION (exp);
4846 dst = CALL_EXPR_ARG (exp, 0);
4847 src = CALL_EXPR_ARG (exp, 1);
4849 dst = stabilize_va_list_loc (loc, dst, 1);
4850 src = stabilize_va_list_loc (loc, src, 0);
4852 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4854 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4856 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4857 TREE_SIDE_EFFECTS (t) = 1;
4858 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4860 else
4862 rtx dstb, srcb, size;
4864 /* Evaluate to pointers. */
4865 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4866 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4867 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4868 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4870 dstb = convert_memory_address (Pmode, dstb);
4871 srcb = convert_memory_address (Pmode, srcb);
4873 /* "Dereference" to BLKmode memories. */
4874 dstb = gen_rtx_MEM (BLKmode, dstb);
4875 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4876 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4877 srcb = gen_rtx_MEM (BLKmode, srcb);
4878 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4879 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4881 /* Copy. */
4882 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4885 return const0_rtx;
4888 /* Expand a call to one of the builtin functions __builtin_frame_address or
4889 __builtin_return_address. */
4891 static rtx
4892 expand_builtin_frame_address (tree fndecl, tree exp)
4894 /* The argument must be a nonnegative integer constant.
4895 It counts the number of frames to scan up the stack.
4896 The value is either the frame pointer value or the return
4897 address saved in that frame. */
4898 if (call_expr_nargs (exp) == 0)
4899 /* Warning about missing arg was already issued. */
4900 return const0_rtx;
4901 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4903 error ("invalid argument to %qD", fndecl);
4904 return const0_rtx;
4906 else
4908 /* Number of frames to scan up the stack. */
4909 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4911 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4913 /* Some ports cannot access arbitrary stack frames. */
4914 if (tem == NULL)
4916 warning (0, "unsupported argument to %qD", fndecl);
4917 return const0_rtx;
4920 if (count)
4922 /* Warn since no effort is made to ensure that any frame
4923 beyond the current one exists or can be safely reached. */
4924 warning (OPT_Wframe_address, "calling %qD with "
4925 "a nonzero argument is unsafe", fndecl);
4928 /* For __builtin_frame_address, return what we've got. */
4929 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4930 return tem;
4932 if (!REG_P (tem)
4933 && ! CONSTANT_P (tem))
4934 tem = copy_addr_to_reg (tem);
4935 return tem;
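/* For illustration (an aside, not from the GCC sources): how the checks
   above play out for typical uses.  */
#if 0 /* usage sketch only */
void *fp = __builtin_frame_address (0);   /* count == 0: no warning */
void *ra = __builtin_return_address (1);  /* count != 0: warns under
                                             -Wframe-address, as outer
                                             frames may not be safely
                                             reachable.  */
#endif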
4939 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4940 failed and the caller should emit a normal call. */
4942 static rtx
4943 expand_builtin_alloca (tree exp)
4945 rtx op0;
4946 rtx result;
4947 unsigned int align;
4948 tree fndecl = get_callee_fndecl (exp);
4949 HOST_WIDE_INT max_size;
4950 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4951 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4952 bool valid_arglist
4953 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4954 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4955 VOID_TYPE)
4956 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4957 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4958 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4960 if (!valid_arglist)
4961 return NULL_RTX;
4963 if ((alloca_for_var && !warn_vla_limit)
4964 || (!alloca_for_var && !warn_alloca_limit))
4966 /* -Walloca-larger-than and -Wvla-larger-than settings override
4967 the more general -Walloc-size-larger-than, so unless either of
4968 the former options is specified, check the alloca arguments for
4969 overflow. */
4970 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4971 int idx[] = { 0, -1 };
4972 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4975 /* Compute the argument. */
4976 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4978 /* Compute the alignment. */
4979 align = (fcode == BUILT_IN_ALLOCA
4980 ? BIGGEST_ALIGNMENT
4981 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4983 /* Compute the maximum size. */
4984 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4985 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4986 : -1);
4988 /* Allocate the desired space. If the allocation stems from the declaration
4989 of a variable-sized object, it cannot accumulate. */
4990 result
4991 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4992 result = convert_memory_address (ptr_mode, result);
4994 return result;
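/* For illustration (an aside, not from the GCC sources; N below is a
   hypothetical size): per the GCC manual, __builtin_alloca_with_align
   takes its alignment argument in bits.  */
#if 0 /* usage sketch only */
void *p = __builtin_alloca_with_align (n, 128);  /* n bytes, aligned to
                                                    128 bits == 16 bytes */
#endif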
4997 /* Expand the call EXP to __asan_allocas_unpoison. Replace the second
4998 argument of the call with virtual_stack_dynamic_rtx, because the asan
4999 pass emits a dummy value for the second parameter, relying on this
5000 function to perform the change. See the motivation in the comment for
5001 the handle_builtin_stack_restore function. */
5003 static rtx
5004 expand_asan_emit_allocas_unpoison (tree exp)
5006 tree arg0 = CALL_EXPR_ARG (exp, 0);
5007 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5008 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
5009 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5010 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5011 top, ptr_mode, bot, ptr_mode);
5012 return ret;
5015 /* Expand a call to bswap builtin in EXP.
5016 Return NULL_RTX if a normal call should be emitted rather than expanding the
5017 function in-line. If convenient, the result should be placed in TARGET.
5018 SUBTARGET may be used as the target for computing one of EXP's operands. */
5020 static rtx
5021 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5022 rtx subtarget)
5024 tree arg;
5025 rtx op0;
5027 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5028 return NULL_RTX;
5030 arg = CALL_EXPR_ARG (exp, 0);
5031 op0 = expand_expr (arg,
5032 subtarget && GET_MODE (subtarget) == target_mode
5033 ? subtarget : NULL_RTX,
5034 target_mode, EXPAND_NORMAL);
5035 if (GET_MODE (op0) != target_mode)
5036 op0 = convert_to_mode (target_mode, op0, 1);
5038 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5040 gcc_assert (target);
5042 return convert_to_mode (target_mode, target, 1);
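/* For illustration (an aside, not from the GCC sources): the effect of
   the expanded bswap_optab operation.  */
#if 0 /* usage sketch only */
unsigned int v = __builtin_bswap32 (0x11223344);  /* v == 0x44332211 */
#endif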
5045 /* Expand a call to a unary builtin in EXP.
5046 Return NULL_RTX if a normal call should be emitted rather than expanding the
5047 function in-line. If convenient, the result should be placed in TARGET.
5048 SUBTARGET may be used as the target for computing one of EXP's operands. */
5050 static rtx
5051 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5052 rtx subtarget, optab op_optab)
5054 rtx op0;
5056 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5057 return NULL_RTX;
5059 /* Compute the argument. */
5060 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5061 (subtarget
5062 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5063 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5064 VOIDmode, EXPAND_NORMAL);
5065 /* Compute op, into TARGET if possible.
5066 Set TARGET to wherever the result comes back. */
5067 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5068 op_optab, op0, target, op_optab != clrsb_optab);
5069 gcc_assert (target);
5071 return convert_to_mode (target_mode, target, 0);
5074 /* Expand a call to __builtin_expect. We just return our argument
5075 as the builtin_expect semantics should have already been applied by
5076 the tree branch prediction pass. */
5078 static rtx
5079 expand_builtin_expect (tree exp, rtx target)
5081 tree arg;
5083 if (call_expr_nargs (exp) < 2)
5084 return const0_rtx;
5085 arg = CALL_EXPR_ARG (exp, 0);
5087 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5088 /* When guessing was done, the hints should be already stripped away. */
5089 gcc_assert (!flag_guess_branch_prob
5090 || optimize == 0 || seen_error ());
5091 return target;
5094 /* Expand a call to __builtin_assume_aligned. We just return our first
5095 argument, as the builtin_assume_aligned semantics should have already
5096 been applied by CCP. */
5098 static rtx
5099 expand_builtin_assume_aligned (tree exp, rtx target)
5101 if (call_expr_nargs (exp) < 2)
5102 return const0_rtx;
5103 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5104 EXPAND_NORMAL);
5105 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5106 && (call_expr_nargs (exp) < 3
5107 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5108 return target;
5111 void
5112 expand_builtin_trap (void)
5114 if (targetm.have_trap ())
5116 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5117 /* For trap insns when not accumulating outgoing args force
5118 REG_ARGS_SIZE note to prevent crossjumping of calls with
5119 different args sizes. */
5120 if (!ACCUMULATE_OUTGOING_ARGS)
5121 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5123 else
5125 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5126 tree call_expr = build_call_expr (fn, 0);
5127 expand_call (call_expr, NULL_RTX, false);
5130 emit_barrier ();
5133 /* Expand a call to __builtin_unreachable. We do nothing except emit
5134 a barrier saying that control flow will not pass here.
5136 It is the responsibility of the program being compiled to ensure
5137 that control flow never reaches __builtin_unreachable. */
5138 static void
5139 expand_builtin_unreachable (void)
5141 emit_barrier ();
5144 /* Expand EXP, a call to fabs, fabsf or fabsl.
5145 Return NULL_RTX if a normal call should be emitted rather than expanding
5146 the function inline. If convenient, the result should be placed
5147 in TARGET. SUBTARGET may be used as the target for computing
5148 the operand. */
5150 static rtx
5151 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5153 machine_mode mode;
5154 tree arg;
5155 rtx op0;
5157 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5158 return NULL_RTX;
5160 arg = CALL_EXPR_ARG (exp, 0);
5161 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5162 mode = TYPE_MODE (TREE_TYPE (arg));
5163 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5164 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5167 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5168 Return NULL if a normal call should be emitted rather than expanding the
5169 function inline. If convenient, the result should be placed in TARGET.
5170 SUBTARGET may be used as the target for computing the operand. */
5172 static rtx
5173 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5175 rtx op0, op1;
5176 tree arg;
5178 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5179 return NULL_RTX;
5181 arg = CALL_EXPR_ARG (exp, 0);
5182 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5184 arg = CALL_EXPR_ARG (exp, 1);
5185 op1 = expand_normal (arg);
5187 return expand_copysign (op0, op1, target);
5190 /* Expand a call to __builtin___clear_cache. */
5192 static rtx
5193 expand_builtin___clear_cache (tree exp)
5195 if (!targetm.code_for_clear_cache)
5197 #ifdef CLEAR_INSN_CACHE
5198 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5199 does something. Just do the default expansion to a call to
5200 __clear_cache(). */
5201 return NULL_RTX;
5202 #else
5203 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5204 does nothing. There is no need to call it. Do nothing. */
5205 return const0_rtx;
5206 #endif /* CLEAR_INSN_CACHE */
5209 /* We have a "clear_cache" insn, and it will handle everything. */
5210 tree begin, end;
5211 rtx begin_rtx, end_rtx;
5213 /* We must not expand to a library call. If we did, any
5214 fallback library function in libgcc that might contain a call to
5215 __builtin___clear_cache() would recurse infinitely. */
5216 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5218 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5219 return const0_rtx;
5222 if (targetm.have_clear_cache ())
5224 struct expand_operand ops[2];
5226 begin = CALL_EXPR_ARG (exp, 0);
5227 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5229 end = CALL_EXPR_ARG (exp, 1);
5230 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5232 create_address_operand (&ops[0], begin_rtx);
5233 create_address_operand (&ops[1], end_rtx);
5234 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5235 return const0_rtx;
5237 return const0_rtx;
5240 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5242 static rtx
5243 round_trampoline_addr (rtx tramp)
5245 rtx temp, addend, mask;
5247 /* If we don't need too much alignment, we'll have been guaranteed
5248 proper alignment by get_trampoline_type. */
5249 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5250 return tramp;
5252 /* Round address up to desired boundary. */
5253 temp = gen_reg_rtx (Pmode);
5254 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5255 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5257 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5258 temp, 0, OPTAB_LIB_WIDEN);
5259 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5260 temp, 0, OPTAB_LIB_WIDEN);
5262 return tramp;
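/* For illustration (an aside, not from the GCC sources): the PLUS/AND
   pair above is the usual round-up idiom.  With a TRAMPOLINE_ALIGNMENT
   of 128 bits (16 bytes), ADDEND == 15 and MASK == -16, so

     tramp = (tramp + 15) & -16

   rounds any address up to the next 16-byte boundary.  */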
5265 static rtx
5266 expand_builtin_init_trampoline (tree exp, bool onstack)
5268 tree t_tramp, t_func, t_chain;
5269 rtx m_tramp, r_tramp, r_chain, tmp;
5271 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5272 POINTER_TYPE, VOID_TYPE))
5273 return NULL_RTX;
5275 t_tramp = CALL_EXPR_ARG (exp, 0);
5276 t_func = CALL_EXPR_ARG (exp, 1);
5277 t_chain = CALL_EXPR_ARG (exp, 2);
5279 r_tramp = expand_normal (t_tramp);
5280 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5281 MEM_NOTRAP_P (m_tramp) = 1;
5283 /* If ONSTACK, the TRAMP argument should be the address of a field
5284 within the local function's FRAME decl. Either way, let's see if
5285 we can fill in the MEM_ATTRs for this memory. */
5286 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5287 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5289 /* Creator of a heap trampoline is responsible for making sure the
5290 address is aligned to at least STACK_BOUNDARY. Normally malloc
5291 will ensure this anyhow. */
5292 tmp = round_trampoline_addr (r_tramp);
5293 if (tmp != r_tramp)
5295 m_tramp = change_address (m_tramp, BLKmode, tmp);
5296 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5297 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5300 /* The FUNC argument should be the address of the nested function.
5301 Extract the actual function decl to pass to the hook. */
5302 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5303 t_func = TREE_OPERAND (t_func, 0);
5304 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5306 r_chain = expand_normal (t_chain);
5308 /* Generate insns to initialize the trampoline. */
5309 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5311 if (onstack)
5313 trampolines_created = 1;
5315 if (targetm.calls.custom_function_descriptors != 0)
5316 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5317 "trampoline generated for nested function %qD", t_func);
5320 return const0_rtx;
5323 static rtx
5324 expand_builtin_adjust_trampoline (tree exp)
5326 rtx tramp;
5328 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5329 return NULL_RTX;
5331 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5332 tramp = round_trampoline_addr (tramp);
5333 if (targetm.calls.trampoline_adjust_address)
5334 tramp = targetm.calls.trampoline_adjust_address (tramp);
5336 return tramp;
5339 /* Expand a call to the builtin descriptor initialization routine.
5340 A descriptor is made up of a couple of pointers to the static
5341 chain and the code entry in this order. */
5343 static rtx
5344 expand_builtin_init_descriptor (tree exp)
5346 tree t_descr, t_func, t_chain;
5347 rtx m_descr, r_descr, r_func, r_chain;
5349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5350 VOID_TYPE))
5351 return NULL_RTX;
5353 t_descr = CALL_EXPR_ARG (exp, 0);
5354 t_func = CALL_EXPR_ARG (exp, 1);
5355 t_chain = CALL_EXPR_ARG (exp, 2);
5357 r_descr = expand_normal (t_descr);
5358 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5359 MEM_NOTRAP_P (m_descr) = 1;
5361 r_func = expand_normal (t_func);
5362 r_chain = expand_normal (t_chain);
5364 /* Generate insns to initialize the descriptor. */
5365 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5366 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5367 POINTER_SIZE / BITS_PER_UNIT), r_func);
5369 return const0_rtx;
5372 /* Expand a call to the builtin descriptor adjustment routine. */
5374 static rtx
5375 expand_builtin_adjust_descriptor (tree exp)
5377 rtx tramp;
5379 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5380 return NULL_RTX;
5382 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5384 /* Unalign the descriptor to allow runtime identification. */
5385 tramp = plus_constant (ptr_mode, tramp,
5386 targetm.calls.custom_function_descriptors);
5388 return force_operand (tramp, NULL_RTX);
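/* A sketch of the object initialized above (field names are
   illustrative; the layout follows the comment on
   expand_builtin_init_descriptor): */
#if 0
struct descriptor
{
  void *chain;   /* static chain, stored first */
  void *entry;   /* code entry point, at offset POINTER_SIZE / BITS_PER_UNIT */
};
/* The value handed back is the descriptor's address plus
   targetm.calls.custom_function_descriptors, so it is deliberately
   misaligned and can be distinguished from ordinary function pointers
   at run time.  */
#endif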
5391 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5392 function. The function first checks whether the back end provides
5393 an insn to implement signbit for the respective mode. If not, it
5394 checks whether the floating point format of the value is such that
5395 the sign bit can be extracted; failing that, signbit expands to ARG < 0.0.
5396 EXP is the expression that is a call to the builtin function; if
5397 convenient, the result should be placed in TARGET. */
5398 static rtx
5399 expand_builtin_signbit (tree exp, rtx target)
5401 const struct real_format *fmt;
5402 scalar_float_mode fmode;
5403 scalar_int_mode rmode, imode;
5404 tree arg;
5405 int word, bitpos;
5406 enum insn_code icode;
5407 rtx temp;
5408 location_t loc = EXPR_LOCATION (exp);
5410 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5411 return NULL_RTX;
5413 arg = CALL_EXPR_ARG (exp, 0);
5414 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5415 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5416 fmt = REAL_MODE_FORMAT (fmode);
5418 arg = builtin_save_expr (arg);
5420 /* Expand the argument yielding a RTX expression. */
5421 temp = expand_normal (arg);
5423 /* Check if the back end provides an insn that handles signbit for the
5424 argument's mode. */
5425 icode = optab_handler (signbit_optab, fmode);
5426 if (icode != CODE_FOR_nothing)
5428 rtx_insn *last = get_last_insn ();
5429 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5430 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5431 return target;
5432 delete_insns_since (last);
5435 /* For floating point formats without a sign bit, implement signbit
5436 as "ARG < 0.0". */
5437 bitpos = fmt->signbit_ro;
5438 if (bitpos < 0)
5440 /* But we can't do this if the format supports signed zero. */
5441 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5443 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5444 build_real (TREE_TYPE (arg), dconst0));
5445 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5448 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5450 imode = int_mode_for_mode (fmode).require ();
5451 temp = gen_lowpart (imode, temp);
5453 else
5455 imode = word_mode;
5456 /* Handle targets with different FP word orders. */
5457 if (FLOAT_WORDS_BIG_ENDIAN)
5458 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5459 else
5460 word = bitpos / BITS_PER_WORD;
5461 temp = operand_subword_force (temp, word, fmode);
5462 bitpos = bitpos % BITS_PER_WORD;
5465 /* Force the intermediate word_mode (or narrower) result into a
5466 register. This avoids attempting to create paradoxical SUBREGs
5467 of floating point modes below. */
5468 temp = force_reg (imode, temp);
5470 /* If the bitpos is within the "result mode" lowpart, the operation
5471 can be implemented with a single bitwise AND. Otherwise, we need
5472 a right shift and an AND. */
5474 if (bitpos < GET_MODE_BITSIZE (rmode))
5476 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5478 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5479 temp = gen_lowpart (rmode, temp);
5480 temp = expand_binop (rmode, and_optab, temp,
5481 immed_wide_int_const (mask, rmode),
5482 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5484 else
5486 /* Perform a logical right shift to place the signbit in the least
5487 significant bit, then truncate the result to the desired mode
5488 and mask just this bit. */
5489 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5490 temp = gen_lowpart (rmode, temp);
5491 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5492 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5495 return temp;
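/* A user-level equivalent of the generic path above, assuming IEEE
   double with the sign in bit 63 (a sketch, not the exact RTL): */
#if 0
#include <stdint.h>
#include <string.h>

static int
my_signbit (double x)
{
  uint64_t u;
  memcpy (&u, &x, sizeof u);  /* reinterpret the bits, like gen_lowpart */
  return (int) (u >> 63);     /* shift the sign bit down to bit 0 */
}
#endif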
5498 /* Expand fork or exec calls. TARGET is the desired target of the
5499 call. EXP is the call. FN is the
5500 identifier of the actual function. IGNORE is nonzero if the
5501 value is to be ignored. */
5503 static rtx
5504 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5506 tree id, decl;
5507 tree call;
5509 /* If we are not profiling, just call the function. */
5510 if (!profile_arc_flag)
5511 return NULL_RTX;
5513 /* Otherwise call the wrapper. This should be equivalent for the rest of
5514 the compiler, so the code does not diverge, and the wrapper may run the
5515 code necessary for keeping the profiling sane. */
5517 switch (DECL_FUNCTION_CODE (fn))
5519 case BUILT_IN_FORK:
5520 id = get_identifier ("__gcov_fork");
5521 break;
5523 case BUILT_IN_EXECL:
5524 id = get_identifier ("__gcov_execl");
5525 break;
5527 case BUILT_IN_EXECV:
5528 id = get_identifier ("__gcov_execv");
5529 break;
5531 case BUILT_IN_EXECLP:
5532 id = get_identifier ("__gcov_execlp");
5533 break;
5535 case BUILT_IN_EXECLE:
5536 id = get_identifier ("__gcov_execle");
5537 break;
5539 case BUILT_IN_EXECVP:
5540 id = get_identifier ("__gcov_execvp");
5541 break;
5543 case BUILT_IN_EXECVE:
5544 id = get_identifier ("__gcov_execve");
5545 break;
5547 default:
5548 gcc_unreachable ();
5551 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5552 FUNCTION_DECL, id, TREE_TYPE (fn));
5553 DECL_EXTERNAL (decl) = 1;
5554 TREE_PUBLIC (decl) = 1;
5555 DECL_ARTIFICIAL (decl) = 1;
5556 TREE_NOTHROW (decl) = 1;
5557 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5558 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5559 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5560 return expand_call (call, target, ignore);
5565 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5566 the pointer in these functions is void*, the tree optimizers may remove
5567 casts. The mode computed in expand_builtin isn't reliable either, due
5568 to __sync_bool_compare_and_swap.
5570 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5571 group of builtins. This gives us log2 of the mode size. */
5573 static inline machine_mode
5574 get_builtin_sync_mode (int fcode_diff)
5576 /* The size is not negotiable, so ask not to get BLKmode in return
5577 if the target indicates that a smaller size would be better. */
5578 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
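/* Example of the FCODE_DIFF encoding, using the layout in builtins.def:
   for __sync_fetch_and_add_4, fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2,
   so this requests an 8 << 2 == 32-bit integer mode (SImode on most
   targets).  */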
5581 /* Expand the memory expression LOC and return the appropriate memory operand
5582 for the builtin_sync operations. */
5584 static rtx
5585 get_builtin_sync_mem (tree loc, machine_mode mode)
5587 rtx addr, mem;
5589 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5590 addr = convert_memory_address (Pmode, addr);
5592 /* Note that we explicitly do not want any alias information for this
5593 memory, so that we kill all other live memories. Otherwise we don't
5594 satisfy the full barrier semantics of the intrinsic. */
5595 mem = validize_mem (gen_rtx_MEM (mode, addr));
5597 /* The alignment needs to be at least that of the mode. */
5598 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5599 get_pointer_alignment (loc)));
5600 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5601 MEM_VOLATILE_P (mem) = 1;
5603 return mem;
5606 /* Make sure an argument is in the right mode.
5607 EXP is the tree argument.
5608 MODE is the mode it should be in. */
5610 static rtx
5611 expand_expr_force_mode (tree exp, machine_mode mode)
5613 rtx val;
5614 machine_mode old_mode;
5616 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5617 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5618 of CONST_INTs, where we know the old_mode only from the call argument. */
5620 old_mode = GET_MODE (val);
5621 if (old_mode == VOIDmode)
5622 old_mode = TYPE_MODE (TREE_TYPE (exp));
5623 val = convert_modes (mode, old_mode, val, 1);
5624 return val;
5628 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5629 EXP is the CALL_EXPR. CODE is the rtx code
5630 that corresponds to the arithmetic or logical operation from the name;
5631 an exception here is that NOT actually means NAND. TARGET is an optional
5632 place for us to store the results; AFTER is true if this is the
5633 fetch_and_xxx form. */
5635 static rtx
5636 expand_builtin_sync_operation (machine_mode mode, tree exp,
5637 enum rtx_code code, bool after,
5638 rtx target)
5640 rtx val, mem;
5641 location_t loc = EXPR_LOCATION (exp);
5643 if (code == NOT && warn_sync_nand)
5645 tree fndecl = get_callee_fndecl (exp);
5646 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5648 static bool warned_f_a_n, warned_n_a_f;
5650 switch (fcode)
5652 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5653 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5654 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5655 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5656 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5657 if (warned_f_a_n)
5658 break;
5660 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5661 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5662 warned_f_a_n = true;
5663 break;
5665 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5666 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5667 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5668 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5669 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5670 if (warned_n_a_f)
5671 break;
5673 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5674 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5675 warned_n_a_f = true;
5676 break;
5678 default:
5679 gcc_unreachable ();
5683 /* Expand the operands. */
5684 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5685 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5687 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5688 after);
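/* A hedged user-level sketch of the two forms handled here: */
#if 0
int
sync_forms (void)
{
  int v = 0;
  int old = __sync_fetch_and_add (&v, 5);  /* AFTER == false: yields 0 */
  int now = __sync_add_and_fetch (&v, 5);  /* AFTER == true: yields 10 */
  /* Since GCC 4.4, __sync_fetch_and_nand (&v, m) performs v = ~(v & m);
     that is why the NOT rtx code above really means NAND.  */
  return old + now;
}
#endif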
5691 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5692 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5693 true if this is the boolean form. TARGET is a place for us to store the
5694 results; this is NOT optional if IS_BOOL is true. */
5696 static rtx
5697 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5698 bool is_bool, rtx target)
5700 rtx old_val, new_val, mem;
5701 rtx *pbool, *poval;
5703 /* Expand the operands. */
5704 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5705 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5706 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5708 pbool = poval = NULL;
5709 if (target != const0_rtx)
5711 if (is_bool)
5712 pbool = &target;
5713 else
5714 poval = &target;
5716 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5717 false, MEMMODEL_SYNC_SEQ_CST,
5718 MEMMODEL_SYNC_SEQ_CST))
5719 return NULL_RTX;
5721 return target;
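/* A sketch of the two intrinsic forms expanded here: */
#if 0
int
cas_forms (void)
{
  int v = 1;
  /* Boolean form: did the swap happen?  */
  if (!__sync_bool_compare_and_swap (&v, 1, 2))
    return -1;
  /* Value form: returns the value seen before the operation.  */
  return __sync_val_compare_and_swap (&v, 2, 3);  /* yields 2 */
}
#endif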
5724 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5725 general form is actually an atomic exchange, and some targets only
5726 support a reduced form with the second argument being a constant 1.
5727 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5728 the results. */
5730 static rtx
5731 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5732 rtx target)
5734 rtx val, mem;
5736 /* Expand the operands. */
5737 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5738 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5740 return expand_sync_lock_test_and_set (target, mem, val);
5743 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5745 static void
5746 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5748 rtx mem;
5750 /* Expand the operands. */
5751 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5753 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
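/* The classic pairing of the two intrinsics above (names are
   illustrative): */
#if 0
static int lock;

void
enter (void)
{
  while (__sync_lock_test_and_set (&lock, 1))  /* acquire semantics */
    ;                                          /* spin */
}

void
leave (void)
{
  __sync_lock_release (&lock);                 /* release; stores 0 */
}
#endif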
5756 /* Given an integer representing an ``enum memmodel'', verify its
5757 correctness and return the memory model enum. */
5759 static enum memmodel
5760 get_memmodel (tree exp)
5762 rtx op;
5763 unsigned HOST_WIDE_INT val;
5764 source_location loc
5765 = expansion_point_location_if_in_system_header (input_location);
5767 /* If the parameter is not a constant, it's a run time value so we'll just
5768 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5769 if (TREE_CODE (exp) != INTEGER_CST)
5770 return MEMMODEL_SEQ_CST;
5772 op = expand_normal (exp);
5774 val = INTVAL (op);
5775 if (targetm.memmodel_check)
5776 val = targetm.memmodel_check (val);
5777 else if (val & ~MEMMODEL_MASK)
5779 warning_at (loc, OPT_Winvalid_memory_model,
5780 "unknown architecture specifier in memory model to builtin");
5781 return MEMMODEL_SEQ_CST;
5784 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5785 if (memmodel_base (val) >= MEMMODEL_LAST)
5787 warning_at (loc, OPT_Winvalid_memory_model,
5788 "invalid memory model argument to builtin");
5789 return MEMMODEL_SEQ_CST;
5792 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5793 be conservative and promote consume to acquire. */
5794 if (val == MEMMODEL_CONSUME)
5795 val = MEMMODEL_ACQUIRE;
5797 return (enum memmodel) val;
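/* The user-visible effect of the checks above (a sketch): */
#if 0
static int x;

int
read_x (void)
{
  /* Expanded as if __ATOMIC_ACQUIRE had been written, per the
     Bugzilla 59448 workaround; a non-constant model argument is
     treated as __ATOMIC_SEQ_CST.  */
  return __atomic_load_n (&x, __ATOMIC_CONSUME);
}
#endif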
5800 /* Expand the __atomic_exchange intrinsic:
5801 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5802 EXP is the CALL_EXPR.
5803 TARGET is an optional place for us to store the results. */
5805 static rtx
5806 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5808 rtx val, mem;
5809 enum memmodel model;
5811 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5813 if (!flag_inline_atomics)
5814 return NULL_RTX;
5816 /* Expand the operands. */
5817 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5818 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5820 return expand_atomic_exchange (target, mem, val, model);
5823 /* Expand the __atomic_compare_exchange intrinsic:
5824 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5825 TYPE desired, BOOL weak,
5826 enum memmodel success,
5827 enum memmodel failure)
5828 EXP is the CALL_EXPR.
5829 TARGET is an optional place for us to store the results. */
5831 static rtx
5832 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5833 rtx target)
5835 rtx expect, desired, mem, oldval;
5836 rtx_code_label *label;
5837 enum memmodel success, failure;
5838 tree weak;
5839 bool is_weak;
5840 source_location loc
5841 = expansion_point_location_if_in_system_header (input_location);
5843 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5844 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5846 if (failure > success)
5848 warning_at (loc, OPT_Winvalid_memory_model,
5849 "failure memory model cannot be stronger than success "
5850 "memory model for %<__atomic_compare_exchange%>");
5851 success = MEMMODEL_SEQ_CST;
5854 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5856 warning_at (loc, OPT_Winvalid_memory_model,
5857 "invalid failure memory model for "
5858 "%<__atomic_compare_exchange%>");
5859 failure = MEMMODEL_SEQ_CST;
5860 success = MEMMODEL_SEQ_CST;
5864 if (!flag_inline_atomics)
5865 return NULL_RTX;
5867 /* Expand the operands. */
5868 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5870 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5871 expect = convert_memory_address (Pmode, expect);
5872 expect = gen_rtx_MEM (mode, expect);
5873 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5875 weak = CALL_EXPR_ARG (exp, 3);
5876 is_weak = false;
5877 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5878 is_weak = true;
5880 if (target == const0_rtx)
5881 target = NULL;
5883 /* Lest the rtl backend create a race condition with an improper store
5884 to memory, always create a new pseudo for OLDVAL. */
5885 oldval = NULL;
5887 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5888 is_weak, success, failure))
5889 return NULL_RTX;
5891 /* Conditionally store back to EXPECT, lest we create a race condition
5892 with an improper store to memory. */
5893 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5894 the normal case where EXPECT is totally private, i.e. a register. At
5895 which point the store can be unconditional. */
5896 label = gen_label_rtx ();
5897 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5898 GET_MODE (target), 1, label);
5899 emit_move_insn (expect, oldval);
5900 emit_label (label);
5902 return target;
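/* A typical use of the builtin expanded above (a sketch): on failure
   the value actually seen is stored back through EXPECT, which is why
   the conditional store is emitted.  */
#if 0
int
add_if_even (int *p, int add)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  while ((expected & 1) == 0
         && !__atomic_compare_exchange_n (p, &expected, expected + add,
                                          /*weak=*/1, __ATOMIC_SEQ_CST,
                                          __ATOMIC_RELAXED))
    ;  /* EXPECTED was refreshed by the failed compare-exchange; retry.  */
  return expected;
}
#endif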
5905 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5906 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5907 call. The weak parameter must be dropped to match the expected parameter
5908 list and the expected argument changed from value to pointer to memory
5909 slot. */
5911 static void
5912 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5914 unsigned int z;
5915 vec<tree, va_gc> *vec;
5917 vec_alloc (vec, 5);
5918 vec->quick_push (gimple_call_arg (call, 0));
5919 tree expected = gimple_call_arg (call, 1);
5920 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5921 TREE_TYPE (expected));
5922 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5923 if (expd != x)
5924 emit_move_insn (x, expd);
5925 tree v = make_tree (TREE_TYPE (expected), x);
5926 vec->quick_push (build1 (ADDR_EXPR,
5927 build_pointer_type (TREE_TYPE (expected)), v));
5928 vec->quick_push (gimple_call_arg (call, 2));
5929 /* Skip the boolean weak parameter. */
5930 for (z = 4; z < 6; z++)
5931 vec->quick_push (gimple_call_arg (call, z));
5932 built_in_function fncode
5933 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5934 + exact_log2 (GET_MODE_SIZE (mode)));
5935 tree fndecl = builtin_decl_explicit (fncode);
5936 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5937 fndecl);
5938 tree exp = build_call_vec (boolean_type_node, fn, vec);
5939 tree lhs = gimple_call_lhs (call);
5940 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5941 if (lhs)
5943 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5944 if (GET_MODE (boolret) != mode)
5945 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5946 x = force_reg (mode, x);
5947 write_complex_part (target, boolret, true);
5948 write_complex_part (target, x, false);
5952 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5954 void
5955 expand_ifn_atomic_compare_exchange (gcall *call)
5957 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5958 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5959 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5960 rtx expect, desired, mem, oldval, boolret;
5961 enum memmodel success, failure;
5962 tree lhs;
5963 bool is_weak;
5964 source_location loc
5965 = expansion_point_location_if_in_system_header (gimple_location (call));
5967 success = get_memmodel (gimple_call_arg (call, 4));
5968 failure = get_memmodel (gimple_call_arg (call, 5));
5970 if (failure > success)
5972 warning_at (loc, OPT_Winvalid_memory_model,
5973 "failure memory model cannot be stronger than success "
5974 "memory model for %<__atomic_compare_exchange%>");
5975 success = MEMMODEL_SEQ_CST;
5978 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5980 warning_at (loc, OPT_Winvalid_memory_model,
5981 "invalid failure memory model for "
5982 "%<__atomic_compare_exchange%>");
5983 failure = MEMMODEL_SEQ_CST;
5984 success = MEMMODEL_SEQ_CST;
5987 if (!flag_inline_atomics)
5989 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5990 return;
5993 /* Expand the operands. */
5994 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5996 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5997 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5999 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6001 boolret = NULL;
6002 oldval = NULL;
6004 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6005 is_weak, success, failure))
6007 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6008 return;
6011 lhs = gimple_call_lhs (call);
6012 if (lhs)
6014 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6015 if (GET_MODE (boolret) != mode)
6016 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6017 write_complex_part (target, boolret, true);
6018 write_complex_part (target, oldval, false);
6022 /* Expand the __atomic_load intrinsic:
6023 TYPE __atomic_load (TYPE *object, enum memmodel)
6024 EXP is the CALL_EXPR.
6025 TARGET is an optional place for us to store the results. */
6027 static rtx
6028 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6030 rtx mem;
6031 enum memmodel model;
6033 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6034 if (is_mm_release (model) || is_mm_acq_rel (model))
6036 source_location loc
6037 = expansion_point_location_if_in_system_header (input_location);
6038 warning_at (loc, OPT_Winvalid_memory_model,
6039 "invalid memory model for %<__atomic_load%>");
6040 model = MEMMODEL_SEQ_CST;
6043 if (!flag_inline_atomics)
6044 return NULL_RTX;
6046 /* Expand the operand. */
6047 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6049 return expand_atomic_load (target, mem, model);
6053 /* Expand the __atomic_store intrinsic:
6054 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6055 EXP is the CALL_EXPR. */
6058 static rtx
6059 expand_builtin_atomic_store (machine_mode mode, tree exp)
6061 rtx mem, val;
6062 enum memmodel model;
6064 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6065 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6066 || is_mm_release (model)))
6068 source_location loc
6069 = expansion_point_location_if_in_system_header (input_location);
6070 warning_at (loc, OPT_Winvalid_memory_model,
6071 "invalid memory model for %<__atomic_store%>");
6072 model = MEMMODEL_SEQ_CST;
6075 if (!flag_inline_atomics)
6076 return NULL_RTX;
6078 /* Expand the operands. */
6079 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6080 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6082 return expand_atomic_store (mem, val, model, false);
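/* Only relaxed, release, and seq_cst are valid for a store; anything
   else is diagnosed and forced to seq_cst (a sketch): */
#if 0
static int x;

void
publish (void)
{
  __atomic_store_n (&x, 1, __ATOMIC_RELEASE);  /* valid */
  __atomic_store_n (&x, 2, __ATOMIC_ACQUIRE);  /* warns; seq_cst used */
}
#endif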
6085 /* Expand the __atomic_fetch_XXX intrinsic:
6086 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6087 EXP is the CALL_EXPR.
6088 TARGET is an optional place for us to store the results.
6089 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6090 FETCH_AFTER is true if returning the result of the operation.
6091 FETCH_AFTER is false if returning the value before the operation.
6092 IGNORE is true if the result is not used.
6093 EXT_CALL is the correct builtin for an external call if this cannot be
6094 resolved to an instruction sequence. */
6096 static rtx
6097 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6098 enum rtx_code code, bool fetch_after,
6099 bool ignore, enum built_in_function ext_call)
6101 rtx val, mem, ret;
6102 enum memmodel model;
6103 tree fndecl;
6104 tree addr;
6106 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6108 /* Expand the operands. */
6109 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6110 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6112 /* Only try generating instructions if inlining is turned on. */
6113 if (flag_inline_atomics)
6115 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6116 if (ret)
6117 return ret;
6120 /* Return if a different routine isn't needed for the library call. */
6121 if (ext_call == BUILT_IN_NONE)
6122 return NULL_RTX;
6124 /* Change the call to the specified function. */
6125 fndecl = get_callee_fndecl (exp);
6126 addr = CALL_EXPR_FN (exp);
6127 STRIP_NOPS (addr);
6129 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6130 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6132 /* If we will emit code after the call, the call cannot be a tail call.
6133 If it is emitted as a tail call, a barrier is emitted after it, and
6134 then all trailing code is removed. */
6135 if (!ignore)
6136 CALL_EXPR_TAILCALL (exp) = 0;
6138 /* Expand the call here so we can emit trailing code. */
6139 ret = expand_call (exp, target, ignore);
6141 /* Replace the original function just in case it matters. */
6142 TREE_OPERAND (addr, 0) = fndecl;
6144 /* Then issue the arithmetic correction to return the right result. */
6145 if (!ignore)
6147 if (code == NOT)
6149 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6150 OPTAB_LIB_WIDEN);
6151 ret = expand_simple_unop (mode, NOT, ret, target, true);
6153 else
6154 ret = expand_simple_binop (mode, code, ret, val, target, true,
6155 OPTAB_LIB_WIDEN);
6157 return ret;
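/* The arithmetic correction above, written out for the NAND case (a
   sketch; the library routine returns the pre-operation value, from
   which the op_and_fetch result is recomputed): */
#if 0
static int
nand_fetch_fallback (int *p, int v, int m)
{
  int tmp = __atomic_fetch_nand (p, v, m);  /* EXT_CALL: returns old *p */
  return ~(tmp & v);                        /* AND then NOT, as above */
}
#endif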
6160 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6162 void
6163 expand_ifn_atomic_bit_test_and (gcall *call)
6165 tree ptr = gimple_call_arg (call, 0);
6166 tree bit = gimple_call_arg (call, 1);
6167 tree flag = gimple_call_arg (call, 2);
6168 tree lhs = gimple_call_lhs (call);
6169 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6170 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6171 enum rtx_code code;
6172 optab optab;
6173 struct expand_operand ops[5];
6175 gcc_assert (flag_inline_atomics);
6177 if (gimple_call_num_args (call) == 4)
6178 model = get_memmodel (gimple_call_arg (call, 3));
6180 rtx mem = get_builtin_sync_mem (ptr, mode);
6181 rtx val = expand_expr_force_mode (bit, mode);
6183 switch (gimple_call_internal_fn (call))
6185 case IFN_ATOMIC_BIT_TEST_AND_SET:
6186 code = IOR;
6187 optab = atomic_bit_test_and_set_optab;
6188 break;
6189 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6190 code = XOR;
6191 optab = atomic_bit_test_and_complement_optab;
6192 break;
6193 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6194 code = AND;
6195 optab = atomic_bit_test_and_reset_optab;
6196 break;
6197 default:
6198 gcc_unreachable ();
6201 if (lhs == NULL_TREE)
6203 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6204 val, NULL_RTX, true, OPTAB_DIRECT);
6205 if (code == AND)
6206 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6207 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6208 return;
6211 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6212 enum insn_code icode = direct_optab_handler (optab, mode);
6213 gcc_assert (icode != CODE_FOR_nothing);
6214 create_output_operand (&ops[0], target, mode);
6215 create_fixed_operand (&ops[1], mem);
6216 create_convert_operand_to (&ops[2], val, mode, true);
6217 create_integer_operand (&ops[3], model);
6218 create_integer_operand (&ops[4], integer_onep (flag));
6219 if (maybe_expand_insn (icode, 5, ops))
6220 return;
6222 rtx bitval = val;
6223 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6224 val, NULL_RTX, true, OPTAB_DIRECT);
6225 rtx maskval = val;
6226 if (code == AND)
6227 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6228 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6229 code, model, false);
6230 if (integer_onep (flag))
6232 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6233 NULL_RTX, true, OPTAB_DIRECT);
6234 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6235 true, OPTAB_DIRECT);
6237 else
6238 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6239 OPTAB_DIRECT);
6240 if (result != target)
6241 emit_move_insn (target, result);
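/* The source pattern this internal function is generated from (a
   sketch; names are illustrative): */
#if 0
static unsigned int flags;

int
set_bit_3 (void)
{
  /* Matched into IFN_ATOMIC_BIT_TEST_AND_SET; ideally a single
     atomic bit-test-and-set instruction (e.g. lock bts on x86).  */
  return (__atomic_fetch_or (&flags, 1u << 3, __ATOMIC_SEQ_CST)
          & (1u << 3)) != 0;
}
#endif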
6244 /* Expand an atomic clear operation.
6245 void __atomic_clear (BOOL *obj, enum memmodel)
6246 EXP is the call expression. */
6248 static rtx
6249 expand_builtin_atomic_clear (tree exp)
6251 machine_mode mode;
6252 rtx mem, ret;
6253 enum memmodel model;
6255 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6256 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6257 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6259 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6261 source_location loc
6262 = expansion_point_location_if_in_system_header (input_location);
6263 warning_at (loc, OPT_Winvalid_memory_model,
6264 "invalid memory model for %<__atomic_store%>");
6265 model = MEMMODEL_SEQ_CST;
6268 /* Try issuing an __atomic_store, allowing fallback to a __sync_lock_release
6269 pattern; failing that, emit a plain store below. The only way this can
6270 fail is if the bool type is larger than a word size. Unlikely, but
6271 handle it anyway for completeness. Assume a single threaded model since
6272 there is no atomic support in this case, and no barriers are required. */
6273 ret = expand_atomic_store (mem, const0_rtx, model, true);
6274 if (!ret)
6275 emit_move_insn (mem, const0_rtx);
6276 return const0_rtx;
6279 /* Expand an atomic test_and_set operation.
6280 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6281 EXP is the call expression. */
6283 static rtx
6284 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6286 rtx mem;
6287 enum memmodel model;
6288 machine_mode mode;
6290 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6292 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6294 return expand_atomic_test_and_set (target, mem, model);
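/* The byte-sized spin-flag idiom built from the two intrinsics above
   (names are illustrative): */
#if 0
static _Bool flag;

void
spin_enter (void)
{
  while (__atomic_test_and_set (&flag, __ATOMIC_ACQUIRE))
    ;
}

void
spin_leave (void)
{
  __atomic_clear (&flag, __ATOMIC_RELEASE);
}
#endif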
6298 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6299 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6301 static tree
6302 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6304 int size;
6305 machine_mode mode;
6306 unsigned int mode_align, type_align;
6308 if (TREE_CODE (arg0) != INTEGER_CST)
6309 return NULL_TREE;
6311 /* We need a corresponding integer mode for the access to be lock-free. */
6312 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6313 if (!int_mode_for_size (size, 0).exists (&mode))
6314 return boolean_false_node;
6316 mode_align = GET_MODE_ALIGNMENT (mode);
6318 if (TREE_CODE (arg1) == INTEGER_CST)
6320 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6322 /* Either this argument is null, or it's a fake pointer encoding
6323 the alignment of the object. */
6324 val = least_bit_hwi (val);
6325 val *= BITS_PER_UNIT;
6327 if (val == 0 || mode_align < val)
6328 type_align = mode_align;
6329 else
6330 type_align = val;
6332 else
6334 tree ttype = TREE_TYPE (arg1);
6336 /* This function is usually invoked and folded immediately by the front
6337 end before anything else has a chance to look at it. The pointer
6338 parameter at this point is usually cast to a void *, so check for that
6339 and look past the cast. */
6340 if (CONVERT_EXPR_P (arg1)
6341 && POINTER_TYPE_P (ttype)
6342 && VOID_TYPE_P (TREE_TYPE (ttype))
6343 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6344 arg1 = TREE_OPERAND (arg1, 0);
6346 ttype = TREE_TYPE (arg1);
6347 gcc_assert (POINTER_TYPE_P (ttype));
6349 /* Get the underlying type of the object. */
6350 ttype = TREE_TYPE (ttype);
6351 type_align = TYPE_ALIGN (ttype);
6354 /* If the object has smaller alignment, the lock free routines cannot
6355 be used. */
6356 if (type_align < mode_align)
6357 return boolean_false_node;
6359 /* Check if a compare_and_swap pattern exists for the mode which represents
6360 the required size. The pattern is not allowed to fail, so the existence
6361 of the pattern indicates support is present. Also require that an
6362 atomic load exists for the required size. */
6363 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6364 return boolean_true_node;
6365 else
6366 return boolean_false_node;
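/* A front-end view of the folding above (results are target-dependent;
   the values shown assume a typical 64-bit target): */
#if 0
/* Size 4 with typical (natural) alignment: folds to true.  */
_Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                "naturally aligned int is lock-free");
/* Size 4 with a fake pointer encoding 1-byte alignment: type_align
   (8 bits) < mode_align (32 bits), so this folds to false.  */
static int not_lf = __atomic_always_lock_free (4, (void *) 1);
#endif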
6369 /* Return true if the parameters to call EXP represent an object which will
6370 always generate lock free instructions. The first argument represents the
6371 size of the object, and the second parameter is a pointer to the object
6372 itself. If NULL is passed for the object, then the result is based on
6373 typical alignment for an object of the specified size. Otherwise return
6374 false. */
6376 static rtx
6377 expand_builtin_atomic_always_lock_free (tree exp)
6379 tree size;
6380 tree arg0 = CALL_EXPR_ARG (exp, 0);
6381 tree arg1 = CALL_EXPR_ARG (exp, 1);
6383 if (TREE_CODE (arg0) != INTEGER_CST)
6385 error ("non-constant argument 1 to __atomic_always_lock_free");
6386 return const0_rtx;
6389 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6390 if (size == boolean_true_node)
6391 return const1_rtx;
6392 return const0_rtx;
6395 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6396 is lock free on this architecture. */
6398 static tree
6399 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6401 if (!flag_inline_atomics)
6402 return NULL_TREE;
6404 /* If it isn't always lock free, don't generate a result. */
6405 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6406 return boolean_true_node;
6408 return NULL_TREE;
6411 /* Return true if the parameters to call EXP represent an object which will
6412 always generate lock free instructions. The first argument represents the
6413 size of the object, and the second parameter is a pointer to the object
6414 itself. If NULL is passed for the object, then the result is based on
6415 typical alignment for an object of the specified size. Otherwise return
6416 NULL. */
6418 static rtx
6419 expand_builtin_atomic_is_lock_free (tree exp)
6421 tree size;
6422 tree arg0 = CALL_EXPR_ARG (exp, 0);
6423 tree arg1 = CALL_EXPR_ARG (exp, 1);
6425 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6427 error ("non-integer argument 1 to __atomic_is_lock_free");
6428 return NULL_RTX;
6431 if (!flag_inline_atomics)
6432 return NULL_RTX;
6434 /* If the value is known at compile time, return the RTX for it. */
6435 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6436 if (size == boolean_true_node)
6437 return const1_rtx;
6439 return NULL_RTX;
6442 /* Expand the __atomic_thread_fence intrinsic:
6443 void __atomic_thread_fence (enum memmodel)
6444 EXP is the CALL_EXPR. */
6446 static void
6447 expand_builtin_atomic_thread_fence (tree exp)
6449 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6450 expand_mem_thread_fence (model);
6453 /* Expand the __atomic_signal_fence intrinsic:
6454 void __atomic_signal_fence (enum memmodel)
6455 EXP is the CALL_EXPR. */
6457 static void
6458 expand_builtin_atomic_signal_fence (tree exp)
6460 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6461 expand_mem_signal_fence (model);
6464 /* Expand the __sync_synchronize intrinsic. */
6466 static void
6467 expand_builtin_sync_synchronize (void)
6469 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6472 static rtx
6473 expand_builtin_thread_pointer (tree exp, rtx target)
6475 enum insn_code icode;
6476 if (!validate_arglist (exp, VOID_TYPE))
6477 return const0_rtx;
6478 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6479 if (icode != CODE_FOR_nothing)
6481 struct expand_operand op;
6482 /* If the target is not suitable then create a new target. */
6483 if (target == NULL_RTX
6484 || !REG_P (target)
6485 || GET_MODE (target) != Pmode)
6486 target = gen_reg_rtx (Pmode);
6487 create_output_operand (&op, target, Pmode);
6488 expand_insn (icode, 1, &op);
6489 return target;
6491 error ("__builtin_thread_pointer is not supported on this target");
6492 return const0_rtx;
6495 static void
6496 expand_builtin_set_thread_pointer (tree exp)
6498 enum insn_code icode;
6499 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6500 return;
6501 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6502 if (icode != CODE_FOR_nothing)
6504 struct expand_operand op;
6505 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6506 Pmode, EXPAND_NORMAL);
6507 create_input_operand (&op, val, Pmode);
6508 expand_insn (icode, 1, &op);
6509 return;
6511 error ("__builtin_set_thread_pointer is not supported on this target");
6515 /* Emit code to restore the current value of the stack. */
6517 static void
6518 expand_stack_restore (tree var)
6520 rtx_insn *prev;
6521 rtx sa = expand_normal (var);
6523 sa = convert_memory_address (Pmode, sa);
6525 prev = get_last_insn ();
6526 emit_stack_restore (SAVE_BLOCK, sa);
6528 record_new_stack_level ();
6530 fixup_args_size_notes (prev, get_last_insn (), 0);
6533 /* Emit code to save the current value of the stack. */
6535 static rtx
6536 expand_stack_save (void)
6538 rtx ret = NULL_RTX;
6540 emit_stack_save (SAVE_BLOCK, &ret);
6541 return ret;
6545 /* Expand an expression EXP that calls a built-in function,
6546 with result going to TARGET if that's convenient
6547 (and in mode MODE if that's convenient).
6548 SUBTARGET may be used as the target for computing one of EXP's operands.
6549 IGNORE is nonzero if the value is to be ignored. */
6551 rtx
6552 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6553 int ignore)
6555 tree fndecl = get_callee_fndecl (exp);
6556 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6557 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6558 int flags;
6560 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6561 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6563 /* When ASan is enabled, we don't want to expand some memory/string
6564 builtins and rely on libsanitizer's hooks. This allows us to avoid
6565 redundant checks and be sure, that possible overflow will be detected
6566 by ASan. */
6568 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6569 return expand_call (exp, target, ignore);
6571 /* When not optimizing, generate calls to library functions for a certain
6572 set of builtins. */
6573 if (!optimize
6574 && !called_as_built_in (fndecl)
6575 && fcode != BUILT_IN_FORK
6576 && fcode != BUILT_IN_EXECL
6577 && fcode != BUILT_IN_EXECV
6578 && fcode != BUILT_IN_EXECLP
6579 && fcode != BUILT_IN_EXECLE
6580 && fcode != BUILT_IN_EXECVP
6581 && fcode != BUILT_IN_EXECVE
6582 && !ALLOCA_FUNCTION_CODE_P (fcode)
6583 && fcode != BUILT_IN_FREE
6584 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6585 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6586 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6587 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6588 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6589 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6590 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6591 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6592 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6593 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6594 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6595 && fcode != BUILT_IN_CHKP_BNDRET)
6596 return expand_call (exp, target, ignore);
6598 /* The built-in function expanders test for target == const0_rtx
6599 to determine whether the function's result will be ignored. */
6600 if (ignore)
6601 target = const0_rtx;
6603 /* If the result of a pure or const built-in function is ignored, and
6604 none of its arguments are volatile, we can avoid expanding the
6605 built-in call and just evaluate the arguments for side-effects. */
6606 if (target == const0_rtx
6607 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6608 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6610 bool volatilep = false;
6611 tree arg;
6612 call_expr_arg_iterator iter;
6614 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6615 if (TREE_THIS_VOLATILE (arg))
6617 volatilep = true;
6618 break;
6621 if (! volatilep)
6623 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6624 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6625 return const0_rtx;
6629 /* expand_builtin_with_bounds is supposed to be used for
6630 instrumented builtin calls. */
6631 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6633 switch (fcode)
6635 CASE_FLT_FN (BUILT_IN_FABS):
6636 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6637 case BUILT_IN_FABSD32:
6638 case BUILT_IN_FABSD64:
6639 case BUILT_IN_FABSD128:
6640 target = expand_builtin_fabs (exp, target, subtarget);
6641 if (target)
6642 return target;
6643 break;
6645 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6646 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6647 target = expand_builtin_copysign (exp, target, subtarget);
6648 if (target)
6649 return target;
6650 break;
6652 /* Just do a normal library call if we were unable to fold
6653 the values. */
6654 CASE_FLT_FN (BUILT_IN_CABS):
6655 break;
6657 CASE_FLT_FN (BUILT_IN_FMA):
6658 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6659 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6660 if (target)
6661 return target;
6662 break;
6664 CASE_FLT_FN (BUILT_IN_ILOGB):
6665 if (! flag_unsafe_math_optimizations)
6666 break;
6667 gcc_fallthrough ();
6668 CASE_FLT_FN (BUILT_IN_ISINF):
6669 CASE_FLT_FN (BUILT_IN_FINITE):
6670 case BUILT_IN_ISFINITE:
6671 case BUILT_IN_ISNORMAL:
6672 target = expand_builtin_interclass_mathfn (exp, target);
6673 if (target)
6674 return target;
6675 break;
6677 CASE_FLT_FN (BUILT_IN_ICEIL):
6678 CASE_FLT_FN (BUILT_IN_LCEIL):
6679 CASE_FLT_FN (BUILT_IN_LLCEIL):
6680 CASE_FLT_FN (BUILT_IN_LFLOOR):
6681 CASE_FLT_FN (BUILT_IN_IFLOOR):
6682 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6683 target = expand_builtin_int_roundingfn (exp, target);
6684 if (target)
6685 return target;
6686 break;
6688 CASE_FLT_FN (BUILT_IN_IRINT):
6689 CASE_FLT_FN (BUILT_IN_LRINT):
6690 CASE_FLT_FN (BUILT_IN_LLRINT):
6691 CASE_FLT_FN (BUILT_IN_IROUND):
6692 CASE_FLT_FN (BUILT_IN_LROUND):
6693 CASE_FLT_FN (BUILT_IN_LLROUND):
6694 target = expand_builtin_int_roundingfn_2 (exp, target);
6695 if (target)
6696 return target;
6697 break;
6699 CASE_FLT_FN (BUILT_IN_POWI):
6700 target = expand_builtin_powi (exp, target);
6701 if (target)
6702 return target;
6703 break;
6705 CASE_FLT_FN (BUILT_IN_CEXPI):
6706 target = expand_builtin_cexpi (exp, target);
6707 gcc_assert (target);
6708 return target;
6710 CASE_FLT_FN (BUILT_IN_SIN):
6711 CASE_FLT_FN (BUILT_IN_COS):
6712 if (! flag_unsafe_math_optimizations)
6713 break;
6714 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6715 if (target)
6716 return target;
6717 break;
6719 CASE_FLT_FN (BUILT_IN_SINCOS):
6720 if (! flag_unsafe_math_optimizations)
6721 break;
6722 target = expand_builtin_sincos (exp);
6723 if (target)
6724 return target;
6725 break;
6727 case BUILT_IN_APPLY_ARGS:
6728 return expand_builtin_apply_args ();
6730 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6731 FUNCTION with a copy of the parameters described by
6732 ARGUMENTS, and ARGSIZE. It returns a block of memory
6733 allocated on the stack into which is stored all the registers
6734 that might possibly be used for returning the result of a
6735 function. ARGUMENTS is the value returned by
6736 __builtin_apply_args. ARGSIZE is the number of bytes of
6737 arguments that must be copied. ??? How should this value be
6738 computed? We'll also need a safe worst case value for varargs
6739 functions. */
6740 case BUILT_IN_APPLY:
6741 if (!validate_arglist (exp, POINTER_TYPE,
6742 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6743 && !validate_arglist (exp, REFERENCE_TYPE,
6744 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6745 return const0_rtx;
6746 else
6748 rtx ops[3];
6750 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6751 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6752 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6754 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6757 /* __builtin_return (RESULT) causes the function to return the
6758 value described by RESULT. RESULT is address of the block of
6759 memory returned by __builtin_apply. */
6760 case BUILT_IN_RETURN:
6761 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6762 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6763 return const0_rtx;
6765 case BUILT_IN_SAVEREGS:
6766 return expand_builtin_saveregs ();
6768 case BUILT_IN_VA_ARG_PACK:
6769 /* All valid uses of __builtin_va_arg_pack () are removed during
6770 inlining. */
6771 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6772 return const0_rtx;
6774 case BUILT_IN_VA_ARG_PACK_LEN:
6775 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6776 inlining. */
6777 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6778 return const0_rtx;
6780 /* Return the address of the first anonymous stack arg. */
6781 case BUILT_IN_NEXT_ARG:
6782 if (fold_builtin_next_arg (exp, false))
6783 return const0_rtx;
6784 return expand_builtin_next_arg ();
6786 case BUILT_IN_CLEAR_CACHE:
6787 target = expand_builtin___clear_cache (exp);
6788 if (target)
6789 return target;
6790 break;
6792 case BUILT_IN_CLASSIFY_TYPE:
6793 return expand_builtin_classify_type (exp);
6795 case BUILT_IN_CONSTANT_P:
6796 return const0_rtx;
6798 case BUILT_IN_FRAME_ADDRESS:
6799 case BUILT_IN_RETURN_ADDRESS:
6800 return expand_builtin_frame_address (fndecl, exp);
6802 /* Returns the address of the area where the structure is returned.
6803 0 otherwise. */
6804 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6805 if (call_expr_nargs (exp) != 0
6806 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6807 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6808 return const0_rtx;
6809 else
6810 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6812 CASE_BUILT_IN_ALLOCA:
6813 target = expand_builtin_alloca (exp);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6819 return expand_asan_emit_allocas_unpoison (exp);
6821 case BUILT_IN_STACK_SAVE:
6822 return expand_stack_save ();
6824 case BUILT_IN_STACK_RESTORE:
6825 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6826 return const0_rtx;
6828 case BUILT_IN_BSWAP16:
6829 case BUILT_IN_BSWAP32:
6830 case BUILT_IN_BSWAP64:
6831 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6832 if (target)
6833 return target;
6834 break;
6836 CASE_INT_FN (BUILT_IN_FFS):
6837 target = expand_builtin_unop (target_mode, exp, target,
6838 subtarget, ffs_optab);
6839 if (target)
6840 return target;
6841 break;
6843 CASE_INT_FN (BUILT_IN_CLZ):
6844 target = expand_builtin_unop (target_mode, exp, target,
6845 subtarget, clz_optab);
6846 if (target)
6847 return target;
6848 break;
6850 CASE_INT_FN (BUILT_IN_CTZ):
6851 target = expand_builtin_unop (target_mode, exp, target,
6852 subtarget, ctz_optab);
6853 if (target)
6854 return target;
6855 break;
6857 CASE_INT_FN (BUILT_IN_CLRSB):
6858 target = expand_builtin_unop (target_mode, exp, target,
6859 subtarget, clrsb_optab);
6860 if (target)
6861 return target;
6862 break;
6864 CASE_INT_FN (BUILT_IN_POPCOUNT):
6865 target = expand_builtin_unop (target_mode, exp, target,
6866 subtarget, popcount_optab);
6867 if (target)
6868 return target;
6869 break;
6871 CASE_INT_FN (BUILT_IN_PARITY):
6872 target = expand_builtin_unop (target_mode, exp, target,
6873 subtarget, parity_optab);
6874 if (target)
6875 return target;
6876 break;
6878 case BUILT_IN_STRLEN:
6879 target = expand_builtin_strlen (exp, target, target_mode);
6880 if (target)
6881 return target;
6882 break;
6884 case BUILT_IN_STRCAT:
6885 target = expand_builtin_strcat (exp, target);
6886 if (target)
6887 return target;
6888 break;
6890 case BUILT_IN_STRCPY:
6891 target = expand_builtin_strcpy (exp, target);
6892 if (target)
6893 return target;
6894 break;
6896 case BUILT_IN_STRNCAT:
6897 target = expand_builtin_strncat (exp, target);
6898 if (target)
6899 return target;
6900 break;
6902 case BUILT_IN_STRNCPY:
6903 target = expand_builtin_strncpy (exp, target);
6904 if (target)
6905 return target;
6906 break;
6908 case BUILT_IN_STPCPY:
6909 target = expand_builtin_stpcpy (exp, target, mode);
6910 if (target)
6911 return target;
6912 break;
6914 case BUILT_IN_STPNCPY:
6915 target = expand_builtin_stpncpy (exp, target);
6916 if (target)
6917 return target;
6918 break;
6920 case BUILT_IN_MEMCHR:
6921 target = expand_builtin_memchr (exp, target);
6922 if (target)
6923 return target;
6924 break;
6926 case BUILT_IN_MEMCPY:
6927 target = expand_builtin_memcpy (exp, target);
6928 if (target)
6929 return target;
6930 break;
6932 case BUILT_IN_MEMMOVE:
6933 target = expand_builtin_memmove (exp, target);
6934 if (target)
6935 return target;
6936 break;
6938 case BUILT_IN_MEMPCPY:
6939 target = expand_builtin_mempcpy (exp, target);
6940 if (target)
6941 return target;
6942 break;
6944 case BUILT_IN_MEMSET:
6945 target = expand_builtin_memset (exp, target, mode);
6946 if (target)
6947 return target;
6948 break;
6950 case BUILT_IN_BZERO:
6951 target = expand_builtin_bzero (exp);
6952 if (target)
6953 return target;
6954 break;
6956 case BUILT_IN_STRCMP:
6957 target = expand_builtin_strcmp (exp, target);
6958 if (target)
6959 return target;
6960 break;
6962 case BUILT_IN_STRNCMP:
6963 target = expand_builtin_strncmp (exp, target, mode);
6964 if (target)
6965 return target;
6966 break;
6968 case BUILT_IN_BCMP:
6969 case BUILT_IN_MEMCMP:
6970 case BUILT_IN_MEMCMP_EQ:
6971 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6972 if (target)
6973 return target;
6974 if (fcode == BUILT_IN_MEMCMP_EQ)
6976 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6977 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6979 break;
6981 case BUILT_IN_SETJMP:
6982 /* This should have been lowered to the builtins below. */
6983 gcc_unreachable ();
6985 case BUILT_IN_SETJMP_SETUP:
6986 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6987 and the receiver label. */
6988 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6990 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6991 VOIDmode, EXPAND_NORMAL);
6992 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6993 rtx_insn *label_r = label_rtx (label);
6995 /* This is copied from the handling of non-local gotos. */
6996 expand_builtin_setjmp_setup (buf_addr, label_r);
6997 nonlocal_goto_handler_labels
6998 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6999 nonlocal_goto_handler_labels);
7000 /* ??? Do not let expand_label treat us as such since we would
7001 not want to be both on the list of non-local labels and on
7002 the list of forced labels. */
7003 FORCED_LABEL (label) = 0;
7004 return const0_rtx;
7006 break;
7008 case BUILT_IN_SETJMP_RECEIVER:
7009 /* __builtin_setjmp_receiver is passed the receiver label. */
7010 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7012 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7013 rtx_insn *label_r = label_rtx (label);
7015 expand_builtin_setjmp_receiver (label_r);
7016 return const0_rtx;
7018 break;
7020 /* __builtin_longjmp is passed a pointer to an array of five words.
7021 It's similar to the C library longjmp function but works with
7022 __builtin_setjmp above. */
7023 case BUILT_IN_LONGJMP:
7024 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7026 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7027 VOIDmode, EXPAND_NORMAL);
7028 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7030 if (value != const1_rtx)
7032 error ("%<__builtin_longjmp%> second argument must be 1");
7033 return const0_rtx;
7036 expand_builtin_longjmp (buf_addr, value);
7037 return const0_rtx;
7039 break;
7041 case BUILT_IN_NONLOCAL_GOTO:
7042 target = expand_builtin_nonlocal_goto (exp);
7043 if (target)
7044 return target;
7045 break;
7047 /* This updates the setjmp buffer that is its argument with the value
7048 of the current stack pointer. */
7049 case BUILT_IN_UPDATE_SETJMP_BUF:
7050 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7052 rtx buf_addr
7053 = expand_normal (CALL_EXPR_ARG (exp, 0));
7055 expand_builtin_update_setjmp_buf (buf_addr);
7056 return const0_rtx;
7058 break;
7060 case BUILT_IN_TRAP:
7061 expand_builtin_trap ();
7062 return const0_rtx;
7064 case BUILT_IN_UNREACHABLE:
7065 expand_builtin_unreachable ();
7066 return const0_rtx;
7068 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7069 case BUILT_IN_SIGNBITD32:
7070 case BUILT_IN_SIGNBITD64:
7071 case BUILT_IN_SIGNBITD128:
7072 target = expand_builtin_signbit (exp, target);
7073 if (target)
7074 return target;
7075 break;
7077 /* Various hooks for the DWARF 2 __throw routine. */
7078 case BUILT_IN_UNWIND_INIT:
7079 expand_builtin_unwind_init ();
7080 return const0_rtx;
7081 case BUILT_IN_DWARF_CFA:
7082 return virtual_cfa_rtx;
7083 #ifdef DWARF2_UNWIND_INFO
7084 case BUILT_IN_DWARF_SP_COLUMN:
7085 return expand_builtin_dwarf_sp_column ();
7086 case BUILT_IN_INIT_DWARF_REG_SIZES:
7087 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7088 return const0_rtx;
7089 #endif
7090 case BUILT_IN_FROB_RETURN_ADDR:
7091 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7092 case BUILT_IN_EXTRACT_RETURN_ADDR:
7093 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7094 case BUILT_IN_EH_RETURN:
7095 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7096 CALL_EXPR_ARG (exp, 1));
7097 return const0_rtx;
7098 case BUILT_IN_EH_RETURN_DATA_REGNO:
7099 return expand_builtin_eh_return_data_regno (exp);
7100 case BUILT_IN_EXTEND_POINTER:
7101 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7102 case BUILT_IN_EH_POINTER:
7103 return expand_builtin_eh_pointer (exp);
7104 case BUILT_IN_EH_FILTER:
7105 return expand_builtin_eh_filter (exp);
7106 case BUILT_IN_EH_COPY_VALUES:
7107 return expand_builtin_eh_copy_values (exp);
7109 case BUILT_IN_VA_START:
7110 return expand_builtin_va_start (exp);
7111 case BUILT_IN_VA_END:
7112 return expand_builtin_va_end (exp);
7113 case BUILT_IN_VA_COPY:
7114 return expand_builtin_va_copy (exp);
7115 case BUILT_IN_EXPECT:
7116 return expand_builtin_expect (exp, target);
7117 case BUILT_IN_ASSUME_ALIGNED:
7118 return expand_builtin_assume_aligned (exp, target);
7119 case BUILT_IN_PREFETCH:
7120 expand_builtin_prefetch (exp);
7121 return const0_rtx;
7123 case BUILT_IN_INIT_TRAMPOLINE:
7124 return expand_builtin_init_trampoline (exp, true);
7125 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7126 return expand_builtin_init_trampoline (exp, false);
7127 case BUILT_IN_ADJUST_TRAMPOLINE:
7128 return expand_builtin_adjust_trampoline (exp);
7130 case BUILT_IN_INIT_DESCRIPTOR:
7131 return expand_builtin_init_descriptor (exp);
7132 case BUILT_IN_ADJUST_DESCRIPTOR:
7133 return expand_builtin_adjust_descriptor (exp);
7135 case BUILT_IN_FORK:
7136 case BUILT_IN_EXECL:
7137 case BUILT_IN_EXECV:
7138 case BUILT_IN_EXECLP:
7139 case BUILT_IN_EXECLE:
7140 case BUILT_IN_EXECVP:
7141 case BUILT_IN_EXECVE:
7142 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7143 if (target)
7144 return target;
7145 break;
7147 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7148 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7149 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7150 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7151 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7152 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7153 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7154 if (target)
7155 return target;
7156 break;
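/* The _1/_2/_4/_8/_16 suffix encodes the operand size in bytes, so
   subtracting the _1 enumerator gives log2 of that size, which
   get_builtin_sync_mode turns into a machine mode.  A source-level
   sketch of the builtin itself (illustrative only):

     int counter;
     int old = __sync_fetch_and_add (&counter, 5);   returns the old value
*/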
7158 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7159 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7160 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7161 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7162 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7163 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7164 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7165 if (target)
7166 return target;
7167 break;
7169 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7170 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7171 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7172 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7173 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7174 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7175 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7176 if (target)
7177 return target;
7178 break;
7180 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7181 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7182 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7183 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7184 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7185 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7186 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7187 if (target)
7188 return target;
7189 break;
7191 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7192 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7193 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7194 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7195 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7196 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7197 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7198 if (target)
7199 return target;
7200 break;
7202 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7203 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7204 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7205 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7206 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7207 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7208 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7209 if (target)
7210 return target;
7211 break;
7213 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7214 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7215 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7216 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7217 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7218 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7219 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7220 if (target)
7221 return target;
7222 break;
7224 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7225 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7226 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7227 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7228 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7229 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7230 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7231 if (target)
7232 return target;
7233 break;
7235 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7236 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7237 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7238 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7239 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7240 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7241 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7242 if (target)
7243 return target;
7244 break;
7246 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7247 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7248 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7249 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7250 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7251 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7252 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7253 if (target)
7254 return target;
7255 break;
7257 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7258 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7259 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7260 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7261 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7262 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7263 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7264 if (target)
7265 return target;
7266 break;
7268 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7269 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7270 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7271 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7272 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7273 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7274 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7275 if (target)
7276 return target;
7277 break;
7279 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7280 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7281 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7282 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7283 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7284 if (mode == VOIDmode)
7285 mode = TYPE_MODE (boolean_type_node);
7286 if (!target || !register_operand (target, mode))
7287 target = gen_reg_rtx (mode);
7289 mode = get_builtin_sync_mode
7290 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7291 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7292 if (target)
7293 return target;
7294 break;
7296 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7297 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7298 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7299 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7300 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7301 mode = get_builtin_sync_mode
7302 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7303 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7304 if (target)
7305 return target;
7306 break;
7308 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7309 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7310 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7311 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7312 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7313 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7314 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7315 if (target)
7316 return target;
7317 break;
7319 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7320 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7321 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7322 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7323 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7324 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7325 expand_builtin_sync_lock_release (mode, exp);
7326 return const0_rtx;
7328 case BUILT_IN_SYNC_SYNCHRONIZE:
7329 expand_builtin_sync_synchronize ();
7330 return const0_rtx;
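/* Source-level sketch of the legacy __sync barrier expanded above
   (illustrative only); it emits a full memory barrier:

     __sync_synchronize ();

   The newer __atomic fences handled further below take an explicit
   memory-model argument instead.  */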
7332 case BUILT_IN_ATOMIC_EXCHANGE_1:
7333 case BUILT_IN_ATOMIC_EXCHANGE_2:
7334 case BUILT_IN_ATOMIC_EXCHANGE_4:
7335 case BUILT_IN_ATOMIC_EXCHANGE_8:
7336 case BUILT_IN_ATOMIC_EXCHANGE_16:
7337 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7338 target = expand_builtin_atomic_exchange (mode, exp, target);
7339 if (target)
7340 return target;
7341 break;
7343 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7344 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7345 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7346 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7347 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7349 unsigned int nargs, z;
7350 vec<tree, va_gc> *vec;
7352 mode =
7353 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7354 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7355 if (target)
7356 return target;
7358 /* If this is turned into an external library call, the weak parameter
7359 must be dropped to match the expected parameter list. */
7360 nargs = call_expr_nargs (exp);
7361 vec_alloc (vec, nargs - 1);
7362 for (z = 0; z < 3; z++)
7363 vec->quick_push (CALL_EXPR_ARG (exp, z));
7364 /* Skip the boolean weak parameter. */
7365 for (z = 4; z < 6; z++)
7366 vec->quick_push (CALL_EXPR_ARG (exp, z));
7367 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7368 break;
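/* The six arguments of the builtin correspond to

     __atomic_compare_exchange_n (ptr, expected, desired, weak,
                                  success_order, failure_order)

   while the external library routine has no WEAK parameter, so
   argument 3 (zero-based) is dropped above before the call falls
   through to expand_call at the end of this function.  (Sketch for
   illustration.)  */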
7371 case BUILT_IN_ATOMIC_LOAD_1:
7372 case BUILT_IN_ATOMIC_LOAD_2:
7373 case BUILT_IN_ATOMIC_LOAD_4:
7374 case BUILT_IN_ATOMIC_LOAD_8:
7375 case BUILT_IN_ATOMIC_LOAD_16:
7376 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7377 target = expand_builtin_atomic_load (mode, exp, target);
7378 if (target)
7379 return target;
7380 break;
7382 case BUILT_IN_ATOMIC_STORE_1:
7383 case BUILT_IN_ATOMIC_STORE_2:
7384 case BUILT_IN_ATOMIC_STORE_4:
7385 case BUILT_IN_ATOMIC_STORE_8:
7386 case BUILT_IN_ATOMIC_STORE_16:
7387 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7388 target = expand_builtin_atomic_store (mode, exp);
7389 if (target)
7390 return const0_rtx;
7391 break;
7393 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7394 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7395 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7396 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7397 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7399 enum built_in_function lib;
7400 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7401 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7402 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7403 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7404 ignore, lib);
7405 if (target)
7406 return target;
7407 break;
7409 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7410 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7411 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7412 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7413 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7415 enum built_in_function lib;
7416 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7417 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7418 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7419 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7420 ignore, lib);
7421 if (target)
7422 return target;
7423 break;
7425 case BUILT_IN_ATOMIC_AND_FETCH_1:
7426 case BUILT_IN_ATOMIC_AND_FETCH_2:
7427 case BUILT_IN_ATOMIC_AND_FETCH_4:
7428 case BUILT_IN_ATOMIC_AND_FETCH_8:
7429 case BUILT_IN_ATOMIC_AND_FETCH_16:
7431 enum built_in_function lib;
7432 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7433 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7434 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7435 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7436 ignore, lib);
7437 if (target)
7438 return target;
7439 break;
7441 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7442 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7443 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7444 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7445 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7447 enum built_in_function lib;
7448 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7449 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7450 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7451 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7452 ignore, lib);
7453 if (target)
7454 return target;
7455 break;
7457 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7458 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7459 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7460 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7461 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7463 enum built_in_function lib;
7464 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7465 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7466 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7467 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7468 ignore, lib);
7469 if (target)
7470 return target;
7471 break;
7473 case BUILT_IN_ATOMIC_OR_FETCH_1:
7474 case BUILT_IN_ATOMIC_OR_FETCH_2:
7475 case BUILT_IN_ATOMIC_OR_FETCH_4:
7476 case BUILT_IN_ATOMIC_OR_FETCH_8:
7477 case BUILT_IN_ATOMIC_OR_FETCH_16:
7479 enum built_in_function lib;
7480 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7481 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7482 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7483 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7484 ignore, lib);
7485 if (target)
7486 return target;
7487 break;
7489 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7490 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7491 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7492 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7493 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7494 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7495 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7496 ignore, BUILT_IN_NONE);
7497 if (target)
7498 return target;
7499 break;
7501 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7502 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7503 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7504 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7505 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7506 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7507 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7508 ignore, BUILT_IN_NONE);
7509 if (target)
7510 return target;
7511 break;
7513 case BUILT_IN_ATOMIC_FETCH_AND_1:
7514 case BUILT_IN_ATOMIC_FETCH_AND_2:
7515 case BUILT_IN_ATOMIC_FETCH_AND_4:
7516 case BUILT_IN_ATOMIC_FETCH_AND_8:
7517 case BUILT_IN_ATOMIC_FETCH_AND_16:
7518 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7519 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7520 ignore, BUILT_IN_NONE);
7521 if (target)
7522 return target;
7523 break;
7525 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7526 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7527 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7528 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7529 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7530 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7531 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7532 ignore, BUILT_IN_NONE);
7533 if (target)
7534 return target;
7535 break;
7537 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7538 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7539 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7540 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7541 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7542 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7543 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7544 ignore, BUILT_IN_NONE);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_ATOMIC_FETCH_OR_1:
7550 case BUILT_IN_ATOMIC_FETCH_OR_2:
7551 case BUILT_IN_ATOMIC_FETCH_OR_4:
7552 case BUILT_IN_ATOMIC_FETCH_OR_8:
7553 case BUILT_IN_ATOMIC_FETCH_OR_16:
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7555 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7556 ignore, BUILT_IN_NONE);
7557 if (target)
7558 return target;
7559 break;
7561 case BUILT_IN_ATOMIC_TEST_AND_SET:
7562 return expand_builtin_atomic_test_and_set (exp, target);
7564 case BUILT_IN_ATOMIC_CLEAR:
7565 return expand_builtin_atomic_clear (exp);
7567 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7568 return expand_builtin_atomic_always_lock_free (exp);
7570 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7571 target = expand_builtin_atomic_is_lock_free (exp);
7572 if (target)
7573 return target;
7574 break;
7576 case BUILT_IN_ATOMIC_THREAD_FENCE:
7577 expand_builtin_atomic_thread_fence (exp);
7578 return const0_rtx;
7580 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7581 expand_builtin_atomic_signal_fence (exp);
7582 return const0_rtx;
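/* Source-level sketch of the two fences just expanded (illustrative
   only):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);    inter-thread fence
     __atomic_signal_fence (__ATOMIC_ACQUIRE);    compiler barrier only,
                                                  w.r.t. a signal handler
*/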
7584 case BUILT_IN_OBJECT_SIZE:
7585 return expand_builtin_object_size (exp);
7587 case BUILT_IN_MEMCPY_CHK:
7588 case BUILT_IN_MEMPCPY_CHK:
7589 case BUILT_IN_MEMMOVE_CHK:
7590 case BUILT_IN_MEMSET_CHK:
7591 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7592 if (target)
7593 return target;
7594 break;
7596 case BUILT_IN_STRCPY_CHK:
7597 case BUILT_IN_STPCPY_CHK:
7598 case BUILT_IN_STRNCPY_CHK:
7599 case BUILT_IN_STPNCPY_CHK:
7600 case BUILT_IN_STRCAT_CHK:
7601 case BUILT_IN_STRNCAT_CHK:
7602 case BUILT_IN_SNPRINTF_CHK:
7603 case BUILT_IN_VSNPRINTF_CHK:
7604 maybe_emit_chk_warning (exp, fcode);
7605 break;
7607 case BUILT_IN_SPRINTF_CHK:
7608 case BUILT_IN_VSPRINTF_CHK:
7609 maybe_emit_sprintf_chk_warning (exp, fcode);
7610 break;
7612 case BUILT_IN_FREE:
7613 if (warn_free_nonheap_object)
7614 maybe_emit_free_warning (exp);
7615 break;
7617 case BUILT_IN_THREAD_POINTER:
7618 return expand_builtin_thread_pointer (exp, target);
7620 case BUILT_IN_SET_THREAD_POINTER:
7621 expand_builtin_set_thread_pointer (exp);
7622 return const0_rtx;
7624 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7625 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7626 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7627 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7628 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7629 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7630 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7631 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7632 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7633 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7634 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7635 /* We allow user CHKP builtins even if Pointer Bounds
7636 Checker is off; expand them to trivial values. */
7637 if (!chkp_function_instrumented_p (current_function_decl))
7639 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7640 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7641 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7642 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7643 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7644 return expand_normal (CALL_EXPR_ARG (exp, 0));
7645 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7646 return expand_normal (size_zero_node);
7647 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7648 return expand_normal (size_int (-1));
7649 else
7650 return const0_rtx;
7652 /* FALLTHROUGH */
7654 case BUILT_IN_CHKP_BNDMK:
7655 case BUILT_IN_CHKP_BNDSTX:
7656 case BUILT_IN_CHKP_BNDCL:
7657 case BUILT_IN_CHKP_BNDCU:
7658 case BUILT_IN_CHKP_BNDLDX:
7659 case BUILT_IN_CHKP_BNDRET:
7660 case BUILT_IN_CHKP_INTERSECT:
7661 case BUILT_IN_CHKP_NARROW:
7662 case BUILT_IN_CHKP_EXTRACT_LOWER:
7663 case BUILT_IN_CHKP_EXTRACT_UPPER:
7664 /* A software implementation of Pointer Bounds Checker is not yet
7665 implemented; target support is required. */
7666 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7667 break;
7669 case BUILT_IN_ACC_ON_DEVICE:
7670 /* Do library call, if we failed to expand the builtin when
7671 folding. */
7672 break;
7674 default: /* Just do a library call, if unknown builtin. */
7675 break;
7678 /* The switch statement above can drop through to cause the function
7679 to be called normally. */
7680 return expand_call (exp, target, ignore);
7683 /* Similar to expand_builtin but used for instrumented calls. */
7685 rtx
7686 expand_builtin_with_bounds (tree exp, rtx target,
7687 rtx subtarget ATTRIBUTE_UNUSED,
7688 machine_mode mode, int ignore)
7690 tree fndecl = get_callee_fndecl (exp);
7691 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7693 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7695 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7696 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7698 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7699 && fcode < END_CHKP_BUILTINS);
7701 switch (fcode)
7703 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7704 target = expand_builtin_memcpy_with_bounds (exp, target);
7705 if (target)
7706 return target;
7707 break;
7709 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7710 target = expand_builtin_mempcpy_with_bounds (exp, target);
7711 if (target)
7712 return target;
7713 break;
7715 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7716 target = expand_builtin_memset_with_bounds (exp, target, mode);
7717 if (target)
7718 return target;
7719 break;
7721 default:
7722 break;
7725 /* The switch statement above can drop through to cause the function
7726 to be called normally. */
7727 return expand_call (exp, target, ignore);
7730 /* Determine whether a tree node represents a call to a built-in
7731 function. If the tree T is a call to a built-in function with
7732 the right number of arguments of the appropriate types, return
7733 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7734 Otherwise the return value is END_BUILTINS. */
7736 enum built_in_function
7737 builtin_mathfn_code (const_tree t)
7739 const_tree fndecl, arg, parmlist;
7740 const_tree argtype, parmtype;
7741 const_call_expr_arg_iterator iter;
7743 if (TREE_CODE (t) != CALL_EXPR
7744 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7745 return END_BUILTINS;
7747 fndecl = get_callee_fndecl (t);
7748 if (fndecl == NULL_TREE
7749 || TREE_CODE (fndecl) != FUNCTION_DECL
7750 || ! DECL_BUILT_IN (fndecl)
7751 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7752 return END_BUILTINS;
7754 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7755 init_const_call_expr_arg_iterator (t, &iter);
7756 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7758 /* If a function doesn't take a variable number of arguments,
7759 the last element in the list will have type `void'. */
7760 parmtype = TREE_VALUE (parmlist);
7761 if (VOID_TYPE_P (parmtype))
7763 if (more_const_call_expr_args_p (&iter))
7764 return END_BUILTINS;
7765 return DECL_FUNCTION_CODE (fndecl);
7768 if (! more_const_call_expr_args_p (&iter))
7769 return END_BUILTINS;
7771 arg = next_const_call_expr_arg (&iter);
7772 argtype = TREE_TYPE (arg);
7774 if (SCALAR_FLOAT_TYPE_P (parmtype))
7776 if (! SCALAR_FLOAT_TYPE_P (argtype))
7777 return END_BUILTINS;
7779 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7781 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7782 return END_BUILTINS;
7784 else if (POINTER_TYPE_P (parmtype))
7786 if (! POINTER_TYPE_P (argtype))
7787 return END_BUILTINS;
7789 else if (INTEGRAL_TYPE_P (parmtype))
7791 if (! INTEGRAL_TYPE_P (argtype))
7792 return END_BUILTINS;
7794 else
7795 return END_BUILTINS;
7798 /* Variable-length argument list. */
7799 return DECL_FUNCTION_CODE (fndecl);
7802 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7803 evaluate to a constant. */
7805 static tree
7806 fold_builtin_constant_p (tree arg)
7808 /* We return 1 for a numeric type that's known to be a constant
7809 value at compile-time or for an aggregate type that's a
7810 literal constant. */
7811 STRIP_NOPS (arg);
7813 /* If we know this is a constant, return the constant one. */
7814 if (CONSTANT_CLASS_P (arg)
7815 || (TREE_CODE (arg) == CONSTRUCTOR
7816 && TREE_CONSTANT (arg)))
7817 return integer_one_node;
7818 if (TREE_CODE (arg) == ADDR_EXPR)
7820 tree op = TREE_OPERAND (arg, 0);
7821 if (TREE_CODE (op) == STRING_CST
7822 || (TREE_CODE (op) == ARRAY_REF
7823 && integer_zerop (TREE_OPERAND (op, 1))
7824 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7825 return integer_one_node;
7828 /* If this expression has side effects, show we don't know it to be a
7829 constant. Likewise if it's a pointer or aggregate type, since in
7830 those cases we only want literals; those are only optimized
7831 when generating RTL, not later.
7832 And finally, if we are compiling an initializer, not code, we
7833 need to return a definite result now; there's not going to be any
7834 more optimization done. */
7835 if (TREE_SIDE_EFFECTS (arg)
7836 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7837 || POINTER_TYPE_P (TREE_TYPE (arg))
7838 || cfun == 0
7839 || folding_initializer
7840 || force_folding_builtin_constant_p)
7841 return integer_zero_node;
7843 return NULL_TREE;
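/* Illustrative examples of the folding above (hypothetical values):

     __builtin_constant_p (42)            -> integer_one_node
     __builtin_constant_p ("abc")         -> integer_one_node
     __builtin_constant_p (some_call ())  -> integer_zero_node (side effects)

   Returning NULL_TREE keeps the call around so that later
   optimizations may still prove the argument constant.  */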
7846 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7847 return it as a truthvalue. */
7849 static tree
7850 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7851 tree predictor)
7853 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7855 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7856 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7857 ret_type = TREE_TYPE (TREE_TYPE (fn));
7858 pred_type = TREE_VALUE (arg_types);
7859 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7861 pred = fold_convert_loc (loc, pred_type, pred);
7862 expected = fold_convert_loc (loc, expected_type, expected);
7863 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7864 predictor);
7866 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7867 build_int_cst (ret_type, 0));
7870 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7871 NULL_TREE if no simplification is possible. */
7873 tree
7874 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7876 tree inner, fndecl, inner_arg0;
7877 enum tree_code code;
7879 /* Distribute the expected value over short-circuiting operators.
7880 See through the cast from truthvalue_type_node to long. */
7881 inner_arg0 = arg0;
7882 while (CONVERT_EXPR_P (inner_arg0)
7883 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7884 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7885 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7887 /* If this is a builtin_expect within a builtin_expect keep the
7888 inner one. See through a comparison against a constant. It
7889 might have been added to create a truthvalue.
7890 inner = inner_arg0;
7892 if (COMPARISON_CLASS_P (inner)
7893 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7894 inner = TREE_OPERAND (inner, 0);
7896 if (TREE_CODE (inner) == CALL_EXPR
7897 && (fndecl = get_callee_fndecl (inner))
7898 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7899 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7900 return arg0;
7902 inner = inner_arg0;
7903 code = TREE_CODE (inner);
7904 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7906 tree op0 = TREE_OPERAND (inner, 0);
7907 tree op1 = TREE_OPERAND (inner, 1);
7909 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7910 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7911 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7913 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7916 /* If the argument isn't invariant then there's nothing else we can do. */
7917 if (!TREE_CONSTANT (inner_arg0))
7918 return NULL_TREE;
7920 /* If we expect that a comparison against the argument will fold to
7921 a constant return the constant. In practice, this means a true
7922 constant or the address of a non-weak symbol. */
7923 inner = inner_arg0;
7924 STRIP_NOPS (inner);
7925 if (TREE_CODE (inner) == ADDR_EXPR)
7929 inner = TREE_OPERAND (inner, 0);
7931 while (TREE_CODE (inner) == COMPONENT_REF
7932 || TREE_CODE (inner) == ARRAY_REF);
7933 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7934 return NULL_TREE;
7937 /* Otherwise, ARG0 already has the proper type for the return value. */
7938 return arg0;
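/* A usage sketch (illustrative only; handle_rare_error is
   hypothetical):

     if (__builtin_expect (ptr == NULL, 0))
       handle_rare_error ();

   The distribution over && and || above means that
   __builtin_expect (a && b, 1) is rewritten as
   __builtin_expect (a, 1) && __builtin_expect (b, 1).  */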
7941 /* Fold a call to __builtin_classify_type with argument ARG. */
7943 static tree
7944 fold_builtin_classify_type (tree arg)
7946 if (arg == 0)
7947 return build_int_cst (integer_type_node, no_type_class);
7949 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7952 /* Fold a call to __builtin_strlen with argument ARG. */
7954 static tree
7955 fold_builtin_strlen (location_t loc, tree type, tree arg)
7957 if (!validate_arg (arg, POINTER_TYPE))
7958 return NULL_TREE;
7959 else
7961 tree len = c_strlen (arg, 0);
7963 if (len)
7964 return fold_convert_loc (loc, type, len);
7966 return NULL_TREE;
7970 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7972 static tree
7973 fold_builtin_inf (location_t loc, tree type, int warn)
7975 REAL_VALUE_TYPE real;
7977 /* __builtin_inff is intended to be usable to define INFINITY on all
7978 targets. If an infinity is not available, INFINITY expands "to a
7979 positive constant of type float that overflows at translation
7980 time", footnote "In this case, using INFINITY will violate the
7981 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7982 Thus we pedwarn to ensure this constraint violation is
7983 diagnosed. */
7984 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7985 pedwarn (loc, 0, "target format does not support infinity");
7987 real_inf (&real);
7988 return build_real (type, real);
7991 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7992 NULL_TREE if no simplification can be made. */
7994 static tree
7995 fold_builtin_sincos (location_t loc,
7996 tree arg0, tree arg1, tree arg2)
7998 tree type;
7999 tree fndecl, call = NULL_TREE;
8001 if (!validate_arg (arg0, REAL_TYPE)
8002 || !validate_arg (arg1, POINTER_TYPE)
8003 || !validate_arg (arg2, POINTER_TYPE))
8004 return NULL_TREE;
8006 type = TREE_TYPE (arg0);
8008 /* Canonicalize sincos to cexpi. */
8009 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8010 if (fn == END_BUILTINS)
8011 return NULL_TREE;
8013 /* Calculate the result when the argument is a constant. */
8014 if (TREE_CODE (arg0) == REAL_CST)
8016 tree complex_type = build_complex_type (type);
8017 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8019 if (!call)
8021 if (!targetm.libc_has_function (function_c99_math_complex)
8022 || !builtin_decl_implicit_p (fn))
8023 return NULL_TREE;
8024 fndecl = builtin_decl_explicit (fn);
8025 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8026 call = builtin_save_expr (call);
8029 return build2 (COMPOUND_EXPR, void_type_node,
8030 build2 (MODIFY_EXPR, void_type_node,
8031 build_fold_indirect_ref_loc (loc, arg1),
8032 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8033 build2 (MODIFY_EXPR, void_type_node,
8034 build_fold_indirect_ref_loc (loc, arg2),
8035 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8038 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8039 Return NULL_TREE if no simplification can be made. */
8041 static tree
8042 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8044 if (!validate_arg (arg1, POINTER_TYPE)
8045 || !validate_arg (arg2, POINTER_TYPE)
8046 || !validate_arg (len, INTEGER_TYPE))
8047 return NULL_TREE;
8049 /* If the LEN parameter is zero, return zero. */
8050 if (integer_zerop (len))
8051 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8052 arg1, arg2);
8054 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8055 if (operand_equal_p (arg1, arg2, 0))
8056 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8058 /* If len parameter is one, return an expression corresponding to
8059 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8060 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8062 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8063 tree cst_uchar_ptr_node
8064 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8066 tree ind1
8067 = fold_convert_loc (loc, integer_type_node,
8068 build1 (INDIRECT_REF, cst_uchar_node,
8069 fold_convert_loc (loc,
8070 cst_uchar_ptr_node,
8071 arg1)));
8072 tree ind2
8073 = fold_convert_loc (loc, integer_type_node,
8074 build1 (INDIRECT_REF, cst_uchar_node,
8075 fold_convert_loc (loc,
8076 cst_uchar_ptr_node,
8077 arg2)));
8078 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8081 return NULL_TREE;
8084 /* Fold a call to builtin isascii with argument ARG. */
8086 static tree
8087 fold_builtin_isascii (location_t loc, tree arg)
8089 if (!validate_arg (arg, INTEGER_TYPE))
8090 return NULL_TREE;
8091 else
8093 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8094 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8095 build_int_cst (integer_type_node,
8096 ~ (unsigned HOST_WIDE_INT) 0x7f));
8097 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8098 arg, integer_zero_node);
8102 /* Fold a call to builtin toascii with argument ARG. */
8104 static tree
8105 fold_builtin_toascii (location_t loc, tree arg)
8107 if (!validate_arg (arg, INTEGER_TYPE))
8108 return NULL_TREE;
8110 /* Transform toascii(c) -> (c & 0x7f). */
8111 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8112 build_int_cst (integer_type_node, 0x7f));
8115 /* Fold a call to builtin isdigit with argument ARG. */
8117 static tree
8118 fold_builtin_isdigit (location_t loc, tree arg)
8120 if (!validate_arg (arg, INTEGER_TYPE))
8121 return NULL_TREE;
8122 else
8124 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8125 /* According to the C standard, isdigit is unaffected by locale.
8126 However, it definitely is affected by the target character set. */
8127 unsigned HOST_WIDE_INT target_digit0
8128 = lang_hooks.to_target_charset ('0');
8130 if (target_digit0 == 0)
8131 return NULL_TREE;
8133 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8134 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8135 build_int_cst (unsigned_type_node, target_digit0));
8136 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8137 build_int_cst (unsigned_type_node, 9));
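/* The single unsigned comparison above works because values below '0'
   wrap around to huge unsigned numbers, and the C standard guarantees
   the digit characters '0'..'9' are contiguous in any character set.
   Sketch: with c == '5', (unsigned) c - '0' == 5 <= 9 holds; with
   c == ' ', the subtraction wraps and the comparison fails.  */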
8141 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8143 static tree
8144 fold_builtin_fabs (location_t loc, tree arg, tree type)
8146 if (!validate_arg (arg, REAL_TYPE))
8147 return NULL_TREE;
8149 arg = fold_convert_loc (loc, type, arg);
8150 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8153 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8155 static tree
8156 fold_builtin_abs (location_t loc, tree arg, tree type)
8158 if (!validate_arg (arg, INTEGER_TYPE))
8159 return NULL_TREE;
8161 arg = fold_convert_loc (loc, type, arg);
8162 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8165 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8167 static tree
8168 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8170 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8171 if (validate_arg (arg0, REAL_TYPE)
8172 && validate_arg (arg1, REAL_TYPE)
8173 && validate_arg (arg2, REAL_TYPE)
8174 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8175 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8177 return NULL_TREE;
8180 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8182 static tree
8183 fold_builtin_carg (location_t loc, tree arg, tree type)
8185 if (validate_arg (arg, COMPLEX_TYPE)
8186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8188 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8190 if (atan2_fn)
8192 tree new_arg = builtin_save_expr (arg);
8193 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8194 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8195 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8199 return NULL_TREE;
8202 /* Fold a call to builtin frexp; we can assume the base is 2. */
8204 static tree
8205 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8207 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8208 return NULL_TREE;
8210 STRIP_NOPS (arg0);
8212 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8213 return NULL_TREE;
8215 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8217 /* Proceed if a valid pointer type was passed in. */
8218 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8220 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8221 tree frac, exp;
8223 switch (value->cl)
8225 case rvc_zero:
8226 /* For +-0, return (*exp = 0, +-0). */
8227 exp = integer_zero_node;
8228 frac = arg0;
8229 break;
8230 case rvc_nan:
8231 case rvc_inf:
8232 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8233 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8234 case rvc_normal:
8236 /* Since the frexp function always expects base 2, and in
8237 GCC normalized significands are already in the range
8238 [0.5, 1.0), we have exactly what frexp wants. */
8239 REAL_VALUE_TYPE frac_rvt = *value;
8240 SET_REAL_EXP (&frac_rvt, 0);
8241 frac = build_real (rettype, frac_rvt);
8242 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8244 break;
8245 default:
8246 gcc_unreachable ();
8249 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8250 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8251 TREE_SIDE_EFFECTS (arg1) = 1;
8252 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8255 return NULL_TREE;
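/* Worked example of the constant folding above (illustrative only):
   frexp (12.0, &e) has 12.0 == 0.75 * 2**4, so the fold produces the
   compound expression (*e = 4, 0.75).  */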
8258 /* Fold a call to builtin modf. */
8260 static tree
8261 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8263 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8264 return NULL_TREE;
8266 STRIP_NOPS (arg0);
8268 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8269 return NULL_TREE;
8271 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8273 /* Proceed if a valid pointer type was passed in. */
8274 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8276 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8277 REAL_VALUE_TYPE trunc, frac;
8279 switch (value->cl)
8281 case rvc_nan:
8282 case rvc_zero:
8283 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8284 trunc = frac = *value;
8285 break;
8286 case rvc_inf:
8287 /* For +-Inf, return (*arg1 = arg0, +-0). */
8288 frac = dconst0;
8289 frac.sign = value->sign;
8290 trunc = *value;
8291 break;
8292 case rvc_normal:
8293 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8294 real_trunc (&trunc, VOIDmode, value);
8295 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8296 /* If the original number was negative and already
8297 integral, then the fractional part is -0.0. */
8298 if (value->sign && frac.cl == rvc_zero)
8299 frac.sign = value->sign;
8300 break;
8303 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8304 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8305 build_real (rettype, trunc));
8306 TREE_SIDE_EFFECTS (arg1) = 1;
8307 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8308 build_real (rettype, frac));
8311 return NULL_TREE;
8314 /* Given a location LOC, an interclass builtin function decl FNDECL
8315 and its single argument ARG, return a folded expression computing
8316 the same, or NULL_TREE if we either couldn't or didn't want to fold
8317 (the latter happens if there's an RTL instruction available). */
8319 static tree
8320 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8322 machine_mode mode;
8324 if (!validate_arg (arg, REAL_TYPE))
8325 return NULL_TREE;
8327 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8328 return NULL_TREE;
8330 mode = TYPE_MODE (TREE_TYPE (arg));
8332 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8334 /* If there is no optab, try generic code. */
8335 switch (DECL_FUNCTION_CODE (fndecl))
8337 tree result;
8339 CASE_FLT_FN (BUILT_IN_ISINF):
8341 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8342 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8343 tree type = TREE_TYPE (arg);
8344 REAL_VALUE_TYPE r;
8345 char buf[128];
8347 if (is_ibm_extended)
8349 /* NaN and Inf are encoded in the high-order double value
8350 only. The low-order value is not significant. */
8351 type = double_type_node;
8352 mode = DFmode;
8353 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8355 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8356 real_from_string (&r, buf);
8357 result = build_call_expr (isgr_fn, 2,
8358 fold_build1_loc (loc, ABS_EXPR, type, arg),
8359 build_real (type, r));
8360 return result;
8362 CASE_FLT_FN (BUILT_IN_FINITE):
8363 case BUILT_IN_ISFINITE:
8365 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8366 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8367 tree type = TREE_TYPE (arg);
8368 REAL_VALUE_TYPE r;
8369 char buf[128];
8371 if (is_ibm_extended)
8373 /* NaN and Inf are encoded in the high-order double value
8374 only. The low-order value is not significant. */
8375 type = double_type_node;
8376 mode = DFmode;
8377 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8379 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8380 real_from_string (&r, buf);
8381 result = build_call_expr (isle_fn, 2,
8382 fold_build1_loc (loc, ABS_EXPR, type, arg),
8383 build_real (type, r));
8384 /*result = fold_build2_loc (loc, UNGT_EXPR,
8385 TREE_TYPE (TREE_TYPE (fndecl)),
8386 fold_build1_loc (loc, ABS_EXPR, type, arg),
8387 build_real (type, r));
8388 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8389 TREE_TYPE (TREE_TYPE (fndecl)),
8390 result);*/
8391 return result;
8393 case BUILT_IN_ISNORMAL:
8395 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8396 islessequal(fabs(x),DBL_MAX). */
8397 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8398 tree type = TREE_TYPE (arg);
8399 tree orig_arg, max_exp, min_exp;
8400 machine_mode orig_mode = mode;
8401 REAL_VALUE_TYPE rmax, rmin;
8402 char buf[128];
8404 orig_arg = arg = builtin_save_expr (arg);
8405 if (is_ibm_extended)
8407 /* Use double to test the normal range of IBM extended
8408 precision. Emin for IBM extended precision is
8409 different from emin for IEEE double, being 53 higher
8410 since the low double exponent is at least 53 lower
8411 than the high double exponent. */
8412 type = double_type_node;
8413 mode = DFmode;
8414 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8416 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8418 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8419 real_from_string (&rmax, buf);
8420 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8421 real_from_string (&rmin, buf);
8422 max_exp = build_real (type, rmax);
8423 min_exp = build_real (type, rmin);
8425 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8426 if (is_ibm_extended)
8428 /* Testing the high end of the range is done just using
8429 the high double, using the same test as isfinite().
8430 For the subnormal end of the range we first test the
8431 high double, then if its magnitude is equal to the
8432 limit of 0x1p-969, we test whether the low double is
8433 non-zero and opposite sign to the high double. */
8434 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8435 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8436 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8437 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8438 arg, min_exp);
8439 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8440 complex_double_type_node, orig_arg);
8441 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8442 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8443 tree zero = build_real (type, dconst0);
8444 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8445 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8446 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8447 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8448 fold_build3 (COND_EXPR,
8449 integer_type_node,
8450 hilt, logt, lolt));
8451 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8452 eq_min, ok_lo);
8453 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8454 gt_min, eq_min);
8456 else
8458 tree const isge_fn
8459 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8460 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8462 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8463 max_exp, min_exp);
8464 return result;
8466 default:
8467 break;
8470 return NULL_TREE;
8473 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
8474 ARG is the argument for the call. */
8476 static tree
8477 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8479 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8481 if (!validate_arg (arg, REAL_TYPE))
8482 return NULL_TREE;
8484 switch (builtin_index)
8486 case BUILT_IN_ISINF:
8487 if (!HONOR_INFINITIES (arg))
8488 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8490 return NULL_TREE;
8492 case BUILT_IN_ISINF_SIGN:
8494 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8495 /* In a boolean context, GCC will fold the inner COND_EXPR to
8496 1. So e.g. "if (isinf_sign(x))" would be folded to just
8497 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8498 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8499 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8500 tree tmp = NULL_TREE;
8502 arg = builtin_save_expr (arg);
8504 if (signbit_fn && isinf_fn)
8506 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8507 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8509 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8510 signbit_call, integer_zero_node);
8511 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8512 isinf_call, integer_zero_node);
8514 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8515 integer_minus_one_node, integer_one_node);
8516 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8517 isinf_call, tmp,
8518 integer_zero_node);
8521 return tmp;
8524 case BUILT_IN_ISFINITE:
8525 if (!HONOR_NANS (arg)
8526 && !HONOR_INFINITIES (arg))
8527 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8529 return NULL_TREE;
8531 case BUILT_IN_ISNAN:
8532 if (!HONOR_NANS (arg))
8533 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8536 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8537 if (is_ibm_extended)
8539 /* NaN and Inf are encoded in the high-order double value
8540 only. The low-order value is not significant. */
8541 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8544 arg = builtin_save_expr (arg);
8545 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8547 default:
8548 gcc_unreachable ();
8552 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8553 This builtin will generate code to return the appropriate floating
8554 point classification depending on the value of the floating point
8555 number passed in. The possible return values must be supplied as
8556 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8557 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8558 one floating-point argument, which is "type generic". */
8560 static tree
8561 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8563 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8564 arg, type, res, tmp;
8565 machine_mode mode;
8566 REAL_VALUE_TYPE r;
8567 char buf[128];
8569 /* Verify the required arguments in the original call. */
8570 if (nargs != 6
8571 || !validate_arg (args[0], INTEGER_TYPE)
8572 || !validate_arg (args[1], INTEGER_TYPE)
8573 || !validate_arg (args[2], INTEGER_TYPE)
8574 || !validate_arg (args[3], INTEGER_TYPE)
8575 || !validate_arg (args[4], INTEGER_TYPE)
8576 || !validate_arg (args[5], REAL_TYPE))
8577 return NULL_TREE;
8579 fp_nan = args[0];
8580 fp_infinite = args[1];
8581 fp_normal = args[2];
8582 fp_subnormal = args[3];
8583 fp_zero = args[4];
8584 arg = args[5];
8585 type = TREE_TYPE (arg);
8586 mode = TYPE_MODE (type);
8587 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8589 /* fpclassify(x) ->
8590 isnan(x) ? FP_NAN :
8591 (fabs(x) == Inf ? FP_INFINITE :
8592 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8593 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8595 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8596 build_real (type, dconst0));
8597 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8598 tmp, fp_zero, fp_subnormal);
8600 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8601 real_from_string (&r, buf);
8602 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8603 arg, build_real (type, r));
8604 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8606 if (HONOR_INFINITIES (mode))
8608 real_inf (&r);
8609 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8610 build_real (type, r));
8611 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8612 fp_infinite, res);
8615 if (HONOR_NANS (mode))
8617 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8618 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8621 return res;
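/* A C library would typically define the classification macro in
   terms of this builtin (sketch, assuming the <math.h> FP_* constants):

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, x)
*/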
8624 /* Fold a call to an unordered comparison function such as
8625 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8626 being called and ARG0 and ARG1 are the arguments for the call.
8627 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8628 the opposite of the desired result. UNORDERED_CODE is used
8629 for modes that can hold NaNs and ORDERED_CODE is used for
8630 the rest. */
8632 static tree
8633 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8634 enum tree_code unordered_code,
8635 enum tree_code ordered_code)
8637 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8638 enum tree_code code;
8639 tree type0, type1;
8640 enum tree_code code0, code1;
8641 tree cmp_type = NULL_TREE;
8643 type0 = TREE_TYPE (arg0);
8644 type1 = TREE_TYPE (arg1);
8646 code0 = TREE_CODE (type0);
8647 code1 = TREE_CODE (type1);
8649 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8650 /* Choose the wider of two real types. */
8651 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8652 ? type0 : type1;
8653 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8654 cmp_type = type0;
8655 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8656 cmp_type = type1;
8658 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8659 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8661 if (unordered_code == UNORDERED_EXPR)
8663 if (!HONOR_NANS (arg0))
8664 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8665 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8668 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8669 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8670 fold_build2_loc (loc, code, type, arg0, arg1));
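/* Example of the inversion trick above: with NaNs honored,
   isgreater (x, y) folds to !UNLE (x, y), i.e. the negation of
   "unordered or less-equal".  Unlike a raw x > y comparison, this
   form raises no invalid-operand exception on quiet NaNs, which is
   the whole point of the is* comparison macros.  */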
8673 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8674 arithmetic if it can never overflow, or into internal functions that
8675 return both the result of the arithmetic and an overflow flag in
8676 a complex integer result, or some other check for overflow.
8677 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8678 checking part of that. */
8680 static tree
8681 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8682 tree arg0, tree arg1, tree arg2)
8684 enum internal_fn ifn = IFN_LAST;
8685 /* The code of the expression corresponding to the type-generic
8686 built-in, or ERROR_MARK for the type-specific ones. */
8687 enum tree_code opcode = ERROR_MARK;
8688 bool ovf_only = false;
8690 switch (fcode)
8692 case BUILT_IN_ADD_OVERFLOW_P:
8693 ovf_only = true;
8694 /* FALLTHRU */
8695 case BUILT_IN_ADD_OVERFLOW:
8696 opcode = PLUS_EXPR;
8697 /* FALLTHRU */
8698 case BUILT_IN_SADD_OVERFLOW:
8699 case BUILT_IN_SADDL_OVERFLOW:
8700 case BUILT_IN_SADDLL_OVERFLOW:
8701 case BUILT_IN_UADD_OVERFLOW:
8702 case BUILT_IN_UADDL_OVERFLOW:
8703 case BUILT_IN_UADDLL_OVERFLOW:
8704 ifn = IFN_ADD_OVERFLOW;
8705 break;
8706 case BUILT_IN_SUB_OVERFLOW_P:
8707 ovf_only = true;
8708 /* FALLTHRU */
8709 case BUILT_IN_SUB_OVERFLOW:
8710 opcode = MINUS_EXPR;
8711 /* FALLTHRU */
8712 case BUILT_IN_SSUB_OVERFLOW:
8713 case BUILT_IN_SSUBL_OVERFLOW:
8714 case BUILT_IN_SSUBLL_OVERFLOW:
8715 case BUILT_IN_USUB_OVERFLOW:
8716 case BUILT_IN_USUBL_OVERFLOW:
8717 case BUILT_IN_USUBLL_OVERFLOW:
8718 ifn = IFN_SUB_OVERFLOW;
8719 break;
8720 case BUILT_IN_MUL_OVERFLOW_P:
8721 ovf_only = true;
8722 /* FALLTHRU */
8723 case BUILT_IN_MUL_OVERFLOW:
8724 opcode = MULT_EXPR;
8725 /* FALLTHRU */
8726 case BUILT_IN_SMUL_OVERFLOW:
8727 case BUILT_IN_SMULL_OVERFLOW:
8728 case BUILT_IN_SMULLL_OVERFLOW:
8729 case BUILT_IN_UMUL_OVERFLOW:
8730 case BUILT_IN_UMULL_OVERFLOW:
8731 case BUILT_IN_UMULLL_OVERFLOW:
8732 ifn = IFN_MUL_OVERFLOW;
8733 break;
8734 default:
8735 gcc_unreachable ();
8738 /* For the "generic" overloads, the first two arguments can have different
8739 types and the last argument determines the target type to use to check
8740 for overflow. The arguments of the other overloads all have the same
8741 type. */
8742 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8744 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8745 arguments are constant, attempt to fold the built-in call into a constant
8746 expression indicating whether or not it detected an overflow. */
8747 if (ovf_only
8748 && TREE_CODE (arg0) == INTEGER_CST
8749 && TREE_CODE (arg1) == INTEGER_CST)
8750 /* Perform the computation in the target type and check for overflow. */
8751 return omit_one_operand_loc (loc, boolean_type_node,
8752 arith_overflowed_p (opcode, type, arg0, arg1)
8753 ? boolean_true_node : boolean_false_node,
8754 arg2);
8756 tree ctype = build_complex_type (type);
8757 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8758 2, arg0, arg1);
8759 tree tgt = save_expr (call);
8760 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8761 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8762 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8764 if (ovf_only)
8765 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8767 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8768 tree store
8769 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8770 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
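/* Usage and expansion sketch (illustrative only; handle_overflow is
   hypothetical):

     int r;
     if (__builtin_add_overflow (a, b, &r))
       handle_overflow ();

   folds to roughly

     c = .ADD_OVERFLOW (a, b);
     *&r = REALPART_EXPR <c>;
     ... IMAGPART_EXPR <c>, converted to boolean ...

   with the store omitted entirely for the _overflow_p variants.  */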
8773 /* Fold a call to __builtin_FILE to a constant string. */
8775 static inline tree
8776 fold_builtin_FILE (location_t loc)
8778 if (const char *fname = LOCATION_FILE (loc))
8779 return build_string_literal (strlen (fname) + 1, fname);
8781 return build_string_literal (1, "");
8784 /* Fold a call to __builtin_FUNCTION to a constant string. */
8786 static inline tree
8787 fold_builtin_FUNCTION ()
8789 const char *name = "";
8791 if (current_function_decl)
8792 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8794 return build_string_literal (strlen (name) + 1, name);
8797 /* Fold a call to __builtin_LINE to an integer constant. */
8799 static inline tree
8800 fold_builtin_LINE (location_t loc, tree type)
8802 return build_int_cst (type, LOCATION_LINE (loc));
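/* Usage sketch for the three location builtins folded above
   (illustrative only):

     printf ("%s:%d: in %s\n", __builtin_FILE (), __builtin_LINE (),
             __builtin_FUNCTION ());

   Each call folds to a constant describing the call site itself.  */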
8805 /* Fold a call to built-in function FNDECL with 0 arguments.
8806 This function returns NULL_TREE if no simplification was possible. */
8808 static tree
8809 fold_builtin_0 (location_t loc, tree fndecl)
8811 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8812 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8813 switch (fcode)
8815 case BUILT_IN_FILE:
8816 return fold_builtin_FILE (loc);
8818 case BUILT_IN_FUNCTION:
8819 return fold_builtin_FUNCTION ();
8821 case BUILT_IN_LINE:
8822 return fold_builtin_LINE (loc, type);
8824 CASE_FLT_FN (BUILT_IN_INF):
8825 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8826 case BUILT_IN_INFD32:
8827 case BUILT_IN_INFD64:
8828 case BUILT_IN_INFD128:
8829 return fold_builtin_inf (loc, type, true);
8831 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8832 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8833 return fold_builtin_inf (loc, type, false);
8835 case BUILT_IN_CLASSIFY_TYPE:
8836 return fold_builtin_classify_type (NULL_TREE);
8838 default:
8839 break;
8841 return NULL_TREE;
8844 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8845 This function returns NULL_TREE if no simplification was possible. */
8847 static tree
8848 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8850 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8851 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8853 if (TREE_CODE (arg0) == ERROR_MARK)
8854 return NULL_TREE;
8856 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8857 return ret;
8859 switch (fcode)
8861 case BUILT_IN_CONSTANT_P:
8863 tree val = fold_builtin_constant_p (arg0);
8865 /* Gimplification will pull the CALL_EXPR for the builtin out of
8866 an if condition. When not optimizing, we'll not CSE it back.
8867 To avoid link-error regressions, return false now. */
8868 if (!val && !optimize)
8869 val = integer_zero_node;
8871 return val;
8874 case BUILT_IN_CLASSIFY_TYPE:
8875 return fold_builtin_classify_type (arg0);
8877 case BUILT_IN_STRLEN:
8878 return fold_builtin_strlen (loc, type, arg0);
8880 CASE_FLT_FN (BUILT_IN_FABS):
8881 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8882 case BUILT_IN_FABSD32:
8883 case BUILT_IN_FABSD64:
8884 case BUILT_IN_FABSD128:
8885 return fold_builtin_fabs (loc, arg0, type);
8887 case BUILT_IN_ABS:
8888 case BUILT_IN_LABS:
8889 case BUILT_IN_LLABS:
8890 case BUILT_IN_IMAXABS:
8891 return fold_builtin_abs (loc, arg0, type);
8893 CASE_FLT_FN (BUILT_IN_CONJ):
8894 if (validate_arg (arg0, COMPLEX_TYPE)
8895 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8896 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8897 break;
8899 CASE_FLT_FN (BUILT_IN_CREAL):
8900 if (validate_arg (arg0, COMPLEX_TYPE)
8901 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8902 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8903 break;
8905 CASE_FLT_FN (BUILT_IN_CIMAG):
8906 if (validate_arg (arg0, COMPLEX_TYPE)
8907 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8908 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8909 break;
8911 CASE_FLT_FN (BUILT_IN_CARG):
8912 return fold_builtin_carg (loc, arg0, type);
8914 case BUILT_IN_ISASCII:
8915 return fold_builtin_isascii (loc, arg0);
8917 case BUILT_IN_TOASCII:
8918 return fold_builtin_toascii (loc, arg0);
8920 case BUILT_IN_ISDIGIT:
8921 return fold_builtin_isdigit (loc, arg0);
8923 CASE_FLT_FN (BUILT_IN_FINITE):
8924 case BUILT_IN_FINITED32:
8925 case BUILT_IN_FINITED64:
8926 case BUILT_IN_FINITED128:
8927 case BUILT_IN_ISFINITE:
8929 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8930 if (ret)
8931 return ret;
8932 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8935 CASE_FLT_FN (BUILT_IN_ISINF):
8936 case BUILT_IN_ISINFD32:
8937 case BUILT_IN_ISINFD64:
8938 case BUILT_IN_ISINFD128:
8940 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8941 if (ret)
8942 return ret;
8943 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8946 case BUILT_IN_ISNORMAL:
8947 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8949 case BUILT_IN_ISINF_SIGN:
8950 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8952 CASE_FLT_FN (BUILT_IN_ISNAN):
8953 case BUILT_IN_ISNAND32:
8954 case BUILT_IN_ISNAND64:
8955 case BUILT_IN_ISNAND128:
8956 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8958 case BUILT_IN_FREE:
8959 if (integer_zerop (arg0))
8960 return build_empty_stmt (loc);
8961 break;
8963 default:
8964 break;
8967 return NULL_TREE;
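/* Illustrative sketch of the BUILT_IN_CONSTANT_P case above, using the
   classic (hypothetical) link-time assertion pattern:
     extern void link_error (void);
     if (__builtin_constant_p (x) && x != 0)
       link_error ();
   Such code relies on the guarded call being removed; folding the
   predicate to 0 at -O0 keeps an undefined reference to link_error
   from surviving to link time.  */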
8971 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8972 This function returns NULL_TREE if no simplification was possible. */
8974 static tree
8975 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8977 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8978 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8980 if (TREE_CODE (arg0) == ERROR_MARK
8981 || TREE_CODE (arg1) == ERROR_MARK)
8982 return NULL_TREE;
8984 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8985 return ret;
8987 switch (fcode)
8989 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8990 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8991 if (validate_arg (arg0, REAL_TYPE)
8992 && validate_arg (arg1, POINTER_TYPE))
8993 return do_mpfr_lgamma_r (arg0, arg1, type);
8994 break;
8996 CASE_FLT_FN (BUILT_IN_FREXP):
8997 return fold_builtin_frexp (loc, arg0, arg1, type);
8999 CASE_FLT_FN (BUILT_IN_MODF):
9000 return fold_builtin_modf (loc, arg0, arg1, type);
9002 case BUILT_IN_STRSPN:
9003 return fold_builtin_strspn (loc, arg0, arg1);
9005 case BUILT_IN_STRCSPN:
9006 return fold_builtin_strcspn (loc, arg0, arg1);
9008 case BUILT_IN_STRPBRK:
9009 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9011 case BUILT_IN_EXPECT:
9012 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9014 case BUILT_IN_ISGREATER:
9015 return fold_builtin_unordered_cmp (loc, fndecl,
9016 arg0, arg1, UNLE_EXPR, LE_EXPR);
9017 case BUILT_IN_ISGREATEREQUAL:
9018 return fold_builtin_unordered_cmp (loc, fndecl,
9019 arg0, arg1, UNLT_EXPR, LT_EXPR);
9020 case BUILT_IN_ISLESS:
9021 return fold_builtin_unordered_cmp (loc, fndecl,
9022 arg0, arg1, UNGE_EXPR, GE_EXPR);
9023 case BUILT_IN_ISLESSEQUAL:
9024 return fold_builtin_unordered_cmp (loc, fndecl,
9025 arg0, arg1, UNGT_EXPR, GT_EXPR);
9026 case BUILT_IN_ISLESSGREATER:
9027 return fold_builtin_unordered_cmp (loc, fndecl,
9028 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9029 case BUILT_IN_ISUNORDERED:
9030 return fold_builtin_unordered_cmp (loc, fndecl,
9031 arg0, arg1, UNORDERED_EXPR,
9032 NOP_EXPR);
9034 /* We do the folding for va_start in the expander. */
9035 case BUILT_IN_VA_START:
9036 break;
9038 case BUILT_IN_OBJECT_SIZE:
9039 return fold_builtin_object_size (arg0, arg1);
9041 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9042 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9044 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9045 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9047 default:
9048 break;
9050 return NULL_TREE;
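/* For illustration: the table above pairs each unordered-comparison
   builtin with the tree code whose negation implements it, so that e.g.
     __builtin_isgreater (x, y)
   can be folded (when NaNs must be honored) to the quiet form
     !(x UNLE y)
   which, unlike a raw x > y, raises nothing on quiet NaN operands; the
   ordered LE_EXPR form is used instead when the mode has no NaNs.  */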
9053 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9054 and ARG2.
9055 This function returns NULL_TREE if no simplification was possible. */
9057 static tree
9058 fold_builtin_3 (location_t loc, tree fndecl,
9059 tree arg0, tree arg1, tree arg2)
9061 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9062 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9064 if (TREE_CODE (arg0) == ERROR_MARK
9065 || TREE_CODE (arg1) == ERROR_MARK
9066 || TREE_CODE (arg2) == ERROR_MARK)
9067 return NULL_TREE;
9069 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9070 arg0, arg1, arg2))
9071 return ret;
9073 switch (fcode)
9076 CASE_FLT_FN (BUILT_IN_SINCOS):
9077 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9079 CASE_FLT_FN (BUILT_IN_FMA):
9080 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9081 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9083 CASE_FLT_FN (BUILT_IN_REMQUO):
9084 if (validate_arg (arg0, REAL_TYPE)
9085 && validate_arg (arg1, REAL_TYPE)
9086 && validate_arg (arg2, POINTER_TYPE))
9087 return do_mpfr_remquo (arg0, arg1, arg2);
9088 break;
9090 case BUILT_IN_MEMCMP:
9091 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9093 case BUILT_IN_EXPECT:
9094 return fold_builtin_expect (loc, arg0, arg1, arg2);
9096 case BUILT_IN_ADD_OVERFLOW:
9097 case BUILT_IN_SUB_OVERFLOW:
9098 case BUILT_IN_MUL_OVERFLOW:
9099 case BUILT_IN_ADD_OVERFLOW_P:
9100 case BUILT_IN_SUB_OVERFLOW_P:
9101 case BUILT_IN_MUL_OVERFLOW_P:
9102 case BUILT_IN_SADD_OVERFLOW:
9103 case BUILT_IN_SADDL_OVERFLOW:
9104 case BUILT_IN_SADDLL_OVERFLOW:
9105 case BUILT_IN_SSUB_OVERFLOW:
9106 case BUILT_IN_SSUBL_OVERFLOW:
9107 case BUILT_IN_SSUBLL_OVERFLOW:
9108 case BUILT_IN_SMUL_OVERFLOW:
9109 case BUILT_IN_SMULL_OVERFLOW:
9110 case BUILT_IN_SMULLL_OVERFLOW:
9111 case BUILT_IN_UADD_OVERFLOW:
9112 case BUILT_IN_UADDL_OVERFLOW:
9113 case BUILT_IN_UADDLL_OVERFLOW:
9114 case BUILT_IN_USUB_OVERFLOW:
9115 case BUILT_IN_USUBL_OVERFLOW:
9116 case BUILT_IN_USUBLL_OVERFLOW:
9117 case BUILT_IN_UMUL_OVERFLOW:
9118 case BUILT_IN_UMULL_OVERFLOW:
9119 case BUILT_IN_UMULLL_OVERFLOW:
9120 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9122 default:
9123 break;
9125 return NULL_TREE;
9128 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9129 arguments. IGNORE is true if the result of the
9130 function call is ignored. This function returns NULL_TREE if no
9131 simplification was possible. */
9133 tree
9134 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9136 tree ret = NULL_TREE;
9138 switch (nargs)
9140 case 0:
9141 ret = fold_builtin_0 (loc, fndecl);
9142 break;
9143 case 1:
9144 ret = fold_builtin_1 (loc, fndecl, args[0]);
9145 break;
9146 case 2:
9147 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9148 break;
9149 case 3:
9150 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9151 break;
9152 default:
9153 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9154 break;
9156 if (ret)
9158 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9159 SET_EXPR_LOCATION (ret, loc);
9160 TREE_NO_WARNING (ret) = 1;
9161 return ret;
9163 return NULL_TREE;
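/* Sketch of why the NOP_EXPR wrapper above is needed (hypothetical
   example): when a statement such as
     strlen ("abc");
   is folded, the bare constant 3 would otherwise trigger a
   "statement with no effect" warning; wrapping the result in a
   NOP_EXPR with TREE_NO_WARNING set suppresses that.  */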
9166 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9167 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9168 of arguments in ARGS to be omitted. OLDNARGS is the number of
9169 elements in ARGS. */
9171 static tree
9172 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9173 int skip, tree fndecl, int n, va_list newargs)
9175 int nargs = oldnargs - skip + n;
9176 tree *buffer;
9178 if (n > 0)
9180 int i, j;
9182 buffer = XALLOCAVEC (tree, nargs);
9183 for (i = 0; i < n; i++)
9184 buffer[i] = va_arg (newargs, tree);
9185 for (j = skip; j < oldnargs; j++, i++)
9186 buffer[i] = args[j];
9188 else
9189 buffer = args + skip;
9191 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9194 /* Return true if FNDECL shouldn't be folded right now.
9195 If a built-in function has an always_inline wrapper, defer
9196 folding it until after always_inline functions have been
9197 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9198 might not be performed. */
9200 bool
9201 avoid_folding_inline_builtin (tree fndecl)
9203 return (DECL_DECLARED_INLINE_P (fndecl)
9204 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9205 && cfun
9206 && !cfun->always_inline_functions_inlined
9207 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
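/* A minimal sketch of the situation described above (glibc-style and
   purely illustrative): with -D_FORTIFY_SOURCE a header may provide an
   always_inline wrapper such as
     extern __inline __attribute__ ((always_inline, gnu_inline)) void *
     memcpy (void *d, const void *s, size_t n)
     {
       return __builtin___memcpy_chk (d, s, n,
				      __builtin_object_size (d, 0));
     }
   Folding calls to memcpy before this wrapper is inlined would bypass
   the _chk object-size checking, hence the deferral.  */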
9210 /* A wrapper function for builtin folding that prevents warnings for
9211 "statement without effect" and the like, caused by removing the
9212 call node earlier than the warning is generated. */
9214 tree
9215 fold_call_expr (location_t loc, tree exp, bool ignore)
9217 tree ret = NULL_TREE;
9218 tree fndecl = get_callee_fndecl (exp);
9219 if (fndecl
9220 && TREE_CODE (fndecl) == FUNCTION_DECL
9221 && DECL_BUILT_IN (fndecl)
9222 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9223 yet. Defer folding until we see all the arguments
9224 (after inlining). */
9225 && !CALL_EXPR_VA_ARG_PACK (exp))
9227 int nargs = call_expr_nargs (exp);
9229 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9230 instead the last argument is __builtin_va_arg_pack (). Defer folding
9231 even in that case, until arguments are finalized. */
9232 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9234 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9235 if (fndecl2
9236 && TREE_CODE (fndecl2) == FUNCTION_DECL
9237 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9238 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9239 return NULL_TREE;
9242 if (avoid_folding_inline_builtin (fndecl))
9243 return NULL_TREE;
9245 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9246 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9247 CALL_EXPR_ARGP (exp), ignore);
9248 else
9250 tree *args = CALL_EXPR_ARGP (exp);
9251 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9252 if (ret)
9253 return ret;
9256 return NULL_TREE;
9259 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9260 N arguments are passed in the array ARGARRAY. Return a folded
9261 expression or NULL_TREE if no simplification was possible. */
9263 tree
9264 fold_builtin_call_array (location_t loc, tree,
9265 tree fn,
9266 int n,
9267 tree *argarray)
9269 if (TREE_CODE (fn) != ADDR_EXPR)
9270 return NULL_TREE;
9272 tree fndecl = TREE_OPERAND (fn, 0);
9273 if (TREE_CODE (fndecl) == FUNCTION_DECL
9274 && DECL_BUILT_IN (fndecl))
9276 /* If the last argument is __builtin_va_arg_pack (), arguments to this
9277 function are not finalized yet. Defer folding until they are. */
9278 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9280 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9281 if (fndecl2
9282 && TREE_CODE (fndecl2) == FUNCTION_DECL
9283 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9284 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9285 return NULL_TREE;
9287 if (avoid_folding_inline_builtin (fndecl))
9288 return NULL_TREE;
9289 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9290 return targetm.fold_builtin (fndecl, n, argarray, false);
9291 else
9292 return fold_builtin_n (loc, fndecl, argarray, n, false);
9295 return NULL_TREE;
9298 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9299 along with N new arguments specified as the "..." parameters. SKIP
9300 is the number of arguments in EXP to be omitted. This function is used
9301 to do varargs-to-varargs transformations. */
9303 static tree
9304 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9306 va_list ap;
9307 tree t;
9309 va_start (ap, n);
9310 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9311 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9312 va_end (ap);
9314 return t;
9317 /* Validate a single argument ARG against a tree code CODE representing
9318 a type. Return true when argument is valid. */
9320 static bool
9321 validate_arg (const_tree arg, enum tree_code code)
9323 if (!arg)
9324 return false;
9325 else if (code == POINTER_TYPE)
9326 return POINTER_TYPE_P (TREE_TYPE (arg));
9327 else if (code == INTEGER_TYPE)
9328 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9329 return code == TREE_CODE (TREE_TYPE (arg));
9332 /* This function validates the types of a function call argument list
9333 against a specified list of tree_codes. If the last specifier is a 0,
9334 that represents an ellipsis; otherwise the last specifier must be a
9335 VOID_TYPE.
9337 This is the GIMPLE version of validate_arglist. Eventually we want to
9338 completely convert builtins.c to work from GIMPLEs and the tree based
9339 validate_arglist will then be removed. */
9341 bool
9342 validate_gimple_arglist (const gcall *call, ...)
9344 enum tree_code code;
9345 bool res = false;
9346 va_list ap;
9347 const_tree arg;
9348 size_t i;
9350 va_start (ap, call);
9351 i = 0;
9353 do
9355 code = (enum tree_code) va_arg (ap, int);
9356 switch (code)
9358 case 0:
9359 /* This signifies an ellipsis; any further arguments are all ok. */
9360 res = true;
9361 goto end;
9362 case VOID_TYPE:
9363 /* This signifies an endlink: if no arguments remain, return
9364 true, otherwise return false. */
9365 res = (i == gimple_call_num_args (call));
9366 goto end;
9367 default:
9368 /* If no parameters remain or the parameter's code does not
9369 match the specified code, return false. Otherwise continue
9370 checking any remaining arguments. */
9371 arg = gimple_call_arg (call, i++);
9372 if (!validate_arg (arg, code))
9373 goto end;
9374 break;
9377 while (1);
9379 /* We need gotos here so that every path reaches the single
9380 va_end call below. */
9381 end: ;
9382 va_end (ap);
9384 return res;
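/* Example usage (illustrative): a caller checking a memcpy-shaped call
   would write
     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return NULL_TREE;
   where the trailing VOID_TYPE demands an exact arity, while a
   trailing 0 instead accepts any further arguments.  */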
9387 /* Default target-specific builtin expander that does nothing. */
9389 rtx
9390 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9391 rtx target ATTRIBUTE_UNUSED,
9392 rtx subtarget ATTRIBUTE_UNUSED,
9393 machine_mode mode ATTRIBUTE_UNUSED,
9394 int ignore ATTRIBUTE_UNUSED)
9396 return NULL_RTX;
9399 /* Returns true if EXP represents data that would potentially reside
9400 in a readonly section. */
9402 bool
9403 readonly_data_expr (tree exp)
9405 STRIP_NOPS (exp);
9407 if (TREE_CODE (exp) != ADDR_EXPR)
9408 return false;
9410 exp = get_base_address (TREE_OPERAND (exp, 0));
9411 if (!exp)
9412 return false;
9414 /* Make sure we call decl_readonly_section only for trees it
9415 can handle (since it returns true for everything it doesn't
9416 understand). */
9417 if (TREE_CODE (exp) == STRING_CST
9418 || TREE_CODE (exp) == CONSTRUCTOR
9419 || (VAR_P (exp) && TREE_STATIC (exp)))
9420 return decl_readonly_section (exp, 0);
9421 else
9422 return false;
9425 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9426 to the call, and TYPE is its return type.
9428 Return NULL_TREE if no simplification was possible, otherwise return the
9429 simplified form of the call as a tree.
9431 The simplified form may be a constant or other expression which
9432 computes the same value, but in a more efficient manner (including
9433 calls to other builtin functions).
9435 The call may contain arguments which need to be evaluated, but
9436 which are not useful to determine the result of the call. In
9437 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9438 COMPOUND_EXPR will be an argument which must be evaluated.
9439 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9440 COMPOUND_EXPR in the chain will contain the tree for the simplified
9441 form of the builtin function call. */
9443 static tree
9444 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9446 if (!validate_arg (s1, POINTER_TYPE)
9447 || !validate_arg (s2, POINTER_TYPE))
9448 return NULL_TREE;
9449 else
9451 tree fn;
9452 const char *p1, *p2;
9454 p2 = c_getstr (s2);
9455 if (p2 == NULL)
9456 return NULL_TREE;
9458 p1 = c_getstr (s1);
9459 if (p1 != NULL)
9461 const char *r = strpbrk (p1, p2);
9462 tree tem;
9464 if (r == NULL)
9465 return build_int_cst (TREE_TYPE (s1), 0);
9467 /* Return an offset into the constant string argument. */
9468 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9469 return fold_convert_loc (loc, type, tem);
9472 if (p2[0] == '\0')
9473 /* strpbrk(x, "") == NULL.
9474 Evaluate and ignore s1 in case it had side-effects. */
9475 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9477 if (p2[1] != '\0')
9478 return NULL_TREE; /* Really call strpbrk. */
9480 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9481 if (!fn)
9482 return NULL_TREE;
9484 /* New argument list transforming strpbrk(s1, s2) to
9485 strchr(s1, s2[0]). */
9486 return build_call_expr_loc (loc, fn, 2, s1,
9487 build_int_cst (integer_type_node, p2[0]));
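/* For illustration, the folds above give
     strpbrk (s1, "")   ->  (evaluate s1), (char *) 0
     strpbrk (s1, "c")  ->  strchr (s1, 'c')
   and, with both arguments constant, the offset of the first matching
   character (or a null pointer) is computed at compile time.  */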
9491 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9492 to the call.
9494 Return NULL_TREE if no simplification was possible, otherwise return the
9495 simplified form of the call as a tree.
9497 The simplified form may be a constant or other expression which
9498 computes the same value, but in a more efficient manner (including
9499 calls to other builtin functions).
9501 The call may contain arguments which need to be evaluated, but
9502 which are not useful to determine the result of the call. In
9503 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9504 COMPOUND_EXPR will be an argument which must be evaluated.
9505 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9506 COMPOUND_EXPR in the chain will contain the tree for the simplified
9507 form of the builtin function call. */
9509 static tree
9510 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9512 if (!validate_arg (s1, POINTER_TYPE)
9513 || !validate_arg (s2, POINTER_TYPE))
9514 return NULL_TREE;
9515 else
9517 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9519 /* If either argument is "", the result is 0. */
9520 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9521 /* Evaluate and ignore both arguments in case either one has
9522 side-effects. */
9523 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9524 s1, s2);
9525 return NULL_TREE;
9529 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9530 to the call.
9532 Return NULL_TREE if no simplification was possible, otherwise return the
9533 simplified form of the call as a tree.
9535 The simplified form may be a constant or other expression which
9536 computes the same value, but in a more efficient manner (including
9537 calls to other builtin functions).
9539 The call may contain arguments which need to be evaluated, but
9540 which are not useful to determine the result of the call. In
9541 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9542 COMPOUND_EXPR will be an argument which must be evaluated.
9543 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9544 COMPOUND_EXPR in the chain will contain the tree for the simplified
9545 form of the builtin function call. */
9547 static tree
9548 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9550 if (!validate_arg (s1, POINTER_TYPE)
9551 || !validate_arg (s2, POINTER_TYPE))
9552 return NULL_TREE;
9553 else
9555 /* If the first argument is "", the result is 0. */
9556 const char *p1 = c_getstr (s1);
9557 if (p1 && *p1 == '\0')
9559 /* Evaluate and ignore argument s2 in case it has
9560 side-effects. */
9561 return omit_one_operand_loc (loc, size_type_node,
9562 size_zero_node, s2);
9565 /* If the second argument is "", return __builtin_strlen(s1). */
9566 const char *p2 = c_getstr (s2);
9567 if (p2 && *p2 == '\0')
9569 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9571 /* If the replacement _DECL isn't initialized, don't do the
9572 transformation. */
9573 if (!fn)
9574 return NULL_TREE;
9576 return build_call_expr_loc (loc, fn, 1, s1);
9578 return NULL_TREE;
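/* For illustration, the two special cases above fold as
     strcspn ("", s2)  ->  (evaluate s2), (size_t) 0
     strcspn (s1, "")  ->  strlen (s1)
   leaving every other form to the library call.  */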
9582 /* Fold the next_arg or va_start call EXP. Returns true if an error
9583 was produced, false otherwise. This is done so that we don't output
9584 the error or warning more than once. */
9586 bool
9587 fold_builtin_next_arg (tree exp, bool va_start_p)
9589 tree fntype = TREE_TYPE (current_function_decl);
9590 int nargs = call_expr_nargs (exp);
9591 tree arg;
9592 /* There is a good chance the current input_location points inside the
9593 definition of the va_start macro (perhaps on the token for the
9594 builtin) in a system header, so warnings will not be emitted.
9595 Use the location in real source code. */
9596 source_location current_location =
9597 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9598 NULL);
9600 if (!stdarg_p (fntype))
9602 error ("%<va_start%> used in function with fixed args");
9603 return true;
9606 if (va_start_p)
9608 if (nargs != 2)
9610 error ("wrong number of arguments to function %<va_start%>");
9611 return true;
9613 arg = CALL_EXPR_ARG (exp, 1);
9615 /* Once the arguments are checked, the call is rewritten below as
9616 __builtin_va_start (ap, 0) or __builtin_next_arg (0). */
9617 else
9619 if (nargs == 0)
9621 /* Evidently an out of date version of <stdarg.h>; can't validate
9622 va_start's second argument, but can still work as intended. */
9623 warning_at (current_location,
9624 OPT_Wvarargs,
9625 "%<__builtin_next_arg%> called without an argument");
9626 return true;
9628 else if (nargs > 1)
9630 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9631 return true;
9633 arg = CALL_EXPR_ARG (exp, 0);
9636 if (TREE_CODE (arg) == SSA_NAME)
9637 arg = SSA_NAME_VAR (arg);
9639 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9640 or __builtin_next_arg (0) the first time we see it, after checking
9641 the arguments and if needed issuing a warning. */
9642 if (!integer_zerop (arg))
9644 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9646 /* Strip off all nops for the sake of the comparison. This
9647 is not quite the same as STRIP_NOPS. It does more.
9648 We must also strip off INDIRECT_EXPR for C++ reference
9649 parameters. */
9650 while (CONVERT_EXPR_P (arg)
9651 || TREE_CODE (arg) == INDIRECT_REF)
9652 arg = TREE_OPERAND (arg, 0);
9653 if (arg != last_parm)
9655 /* FIXME: Sometimes the tree optimizers hand us something other
9656 than the last named argument even though the user did pass the
9657 last one. We only warn and leave the argument as it is, so
9658 wrong code may be generated because of it. */
9660 warning_at (current_location,
9661 OPT_Wvarargs,
9662 "second parameter of %<va_start%> not last named argument");
9665 /* Undefined by C99 7.15.1.4p4 (va_start):
9666 "If the parameter parmN is declared with the register storage
9667 class, with a function or array type, or with a type that is
9668 not compatible with the type that results after application of
9669 the default argument promotions, the behavior is undefined." */
9671 else if (DECL_REGISTER (arg))
9673 warning_at (current_location,
9674 OPT_Wvarargs,
9675 "undefined behavior when second parameter of "
9676 "%<va_start%> is declared with %<register%> storage");
9679 /* We want to verify the second parameter just once before the tree
9680 optimizers are run and then avoid keeping it in the tree,
9681 as otherwise we could warn even for correct code like:
9682 void foo (int i, ...)
9683 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9684 if (va_start_p)
9685 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9686 else
9687 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9689 return false;
9693 /* Expand a call EXP to __builtin_object_size. */
9695 static rtx
9696 expand_builtin_object_size (tree exp)
9698 tree ost;
9699 int object_size_type;
9700 tree fndecl = get_callee_fndecl (exp);
9702 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9704 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9705 exp, fndecl);
9706 expand_builtin_trap ();
9707 return const0_rtx;
9710 ost = CALL_EXPR_ARG (exp, 1);
9711 STRIP_NOPS (ost);
9713 if (TREE_CODE (ost) != INTEGER_CST
9714 || tree_int_cst_sgn (ost) < 0
9715 || compare_tree_int (ost, 3) > 0)
9717 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9718 exp, fndecl);
9719 expand_builtin_trap ();
9720 return const0_rtx;
9723 object_size_type = tree_to_shwi (ost);
9725 return object_size_type < 2 ? constm1_rtx : const0_rtx;
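/* For illustration: when earlier passes could not fold the call away,
   the fallback above expands
     __builtin_object_size (p, 0) / (p, 1)  to  (size_t) -1
     __builtin_object_size (p, 2) / (p, 3)  to  (size_t) 0
   i.e. the "unknown" answers for the maximum and minimum estimates
   respectively.  */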
9728 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9729 FCODE is the BUILT_IN_* to use.
9730 Return NULL_RTX if we failed; the caller should emit a normal call,
9731 otherwise try to get the result in TARGET, if convenient (and in
9732 mode MODE if that's convenient). */
9734 static rtx
9735 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9736 enum built_in_function fcode)
9738 tree dest, src, len, size;
9740 if (!validate_arglist (exp,
9741 POINTER_TYPE,
9742 fcode == BUILT_IN_MEMSET_CHK
9743 ? INTEGER_TYPE : POINTER_TYPE,
9744 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9745 return NULL_RTX;
9747 dest = CALL_EXPR_ARG (exp, 0);
9748 src = CALL_EXPR_ARG (exp, 1);
9749 len = CALL_EXPR_ARG (exp, 2);
9750 size = CALL_EXPR_ARG (exp, 3);
9752 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9753 exp, len, /*maxlen=*/NULL_TREE,
9754 /*str=*/NULL_TREE, size);
9756 if (!tree_fits_uhwi_p (size))
9757 return NULL_RTX;
9759 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9761 /* Avoid transforming the checking call to an ordinary one when
9762 an overflow has been detected or when the call couldn't be
9763 validated because the size is not constant. */
9764 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9765 return NULL_RTX;
9767 tree fn = NULL_TREE;
9768 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9769 mem{cpy,pcpy,move,set} is available. */
9770 switch (fcode)
9772 case BUILT_IN_MEMCPY_CHK:
9773 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9774 break;
9775 case BUILT_IN_MEMPCPY_CHK:
9776 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9777 break;
9778 case BUILT_IN_MEMMOVE_CHK:
9779 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9780 break;
9781 case BUILT_IN_MEMSET_CHK:
9782 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9783 break;
9784 default:
9785 break;
9788 if (! fn)
9789 return NULL_RTX;
9791 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9792 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9793 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9794 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9796 else if (fcode == BUILT_IN_MEMSET_CHK)
9797 return NULL_RTX;
9798 else
9800 unsigned int dest_align = get_pointer_alignment (dest);
9802 /* If DEST is not a pointer type, call the normal function. */
9803 if (dest_align == 0)
9804 return NULL_RTX;
9806 /* If SRC and DEST are the same (and not volatile), do nothing. */
9807 if (operand_equal_p (src, dest, 0))
9809 tree expr;
9811 if (fcode != BUILT_IN_MEMPCPY_CHK)
9813 /* Evaluate and ignore LEN in case it has side-effects. */
9814 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9815 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9818 expr = fold_build_pointer_plus (dest, len);
9819 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9822 /* __memmove_chk special case. */
9823 if (fcode == BUILT_IN_MEMMOVE_CHK)
9825 unsigned int src_align = get_pointer_alignment (src);
9827 if (src_align == 0)
9828 return NULL_RTX;
9830 /* If src is categorized for a readonly section we can use
9831 normal __memcpy_chk. */
9832 if (readonly_data_expr (src))
9834 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9835 if (!fn)
9836 return NULL_RTX;
9837 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9838 dest, src, len, size);
9839 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9840 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9841 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9844 return NULL_RTX;
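/* Illustrative sketch: with a constant length known to fit,
     __builtin___memcpy_chk (d, s, 16, 32)
   is expanded above as a plain memcpy (d, s, 16), whereas a detected
   overflow (LEN > SIZE) or a non-constant SIZE leaves the checking
   call in place so the runtime check is preserved.  */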
9848 /* Emit warning if a buffer overflow is detected at compile time. */
9850 static void
9851 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9853 /* The source string. */
9854 tree srcstr = NULL_TREE;
9855 /* The size of the destination object. */
9856 tree objsize = NULL_TREE;
9857 /* The string that is being concatenated with (as in __strcat_chk)
9858 or null if it isn't. */
9859 tree catstr = NULL_TREE;
9860 /* The maximum length of the source sequence in a bounded operation
9861 (such as __strncat_chk) or null if the operation isn't bounded
9862 (such as __strcat_chk). */
9863 tree maxlen = NULL_TREE;
9864 /* The exact size of the access (such as in __strncpy_chk). */
9865 tree size = NULL_TREE;
9867 switch (fcode)
9869 case BUILT_IN_STRCPY_CHK:
9870 case BUILT_IN_STPCPY_CHK:
9871 srcstr = CALL_EXPR_ARG (exp, 1);
9872 objsize = CALL_EXPR_ARG (exp, 2);
9873 break;
9875 case BUILT_IN_STRCAT_CHK:
9876 /* For __strcat_chk the warning will be emitted only if overflowing
9877 by at least strlen (dest) + 1 bytes. */
9878 catstr = CALL_EXPR_ARG (exp, 0);
9879 srcstr = CALL_EXPR_ARG (exp, 1);
9880 objsize = CALL_EXPR_ARG (exp, 2);
9881 break;
9883 case BUILT_IN_STRNCAT_CHK:
9884 catstr = CALL_EXPR_ARG (exp, 0);
9885 srcstr = CALL_EXPR_ARG (exp, 1);
9886 maxlen = CALL_EXPR_ARG (exp, 2);
9887 objsize = CALL_EXPR_ARG (exp, 3);
9888 break;
9890 case BUILT_IN_STRNCPY_CHK:
9891 case BUILT_IN_STPNCPY_CHK:
9892 srcstr = CALL_EXPR_ARG (exp, 1);
9893 size = CALL_EXPR_ARG (exp, 2);
9894 objsize = CALL_EXPR_ARG (exp, 3);
9895 break;
9897 case BUILT_IN_SNPRINTF_CHK:
9898 case BUILT_IN_VSNPRINTF_CHK:
9899 maxlen = CALL_EXPR_ARG (exp, 1);
9900 objsize = CALL_EXPR_ARG (exp, 3);
9901 break;
9902 default:
9903 gcc_unreachable ();
9906 if (catstr && maxlen)
9908 /* Check __strncat_chk. There is no way to determine the length
9909 of the string to which the source string is being appended so
9910 just warn when the length of the source string is not known. */
9911 check_strncat_sizes (exp, objsize);
9912 return;
9915 check_sizes (OPT_Wstringop_overflow_, exp,
9916 size, maxlen, srcstr, objsize);
9919 /* Emit warning if a buffer overflow is detected at compile time
9920 in __sprintf_chk/__vsprintf_chk calls. */
9922 static void
9923 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9925 tree size, len, fmt;
9926 const char *fmt_str;
9927 int nargs = call_expr_nargs (exp);
9929 /* Verify the required arguments in the original call. */
9931 if (nargs < 4)
9932 return;
9933 size = CALL_EXPR_ARG (exp, 2);
9934 fmt = CALL_EXPR_ARG (exp, 3);
9936 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9937 return;
9939 /* Check whether the format is a literal string constant. */
9940 fmt_str = c_getstr (fmt);
9941 if (fmt_str == NULL)
9942 return;
9944 if (!init_target_chars ())
9945 return;
9947 /* If the format doesn't contain % args or %%, we know its size. */
9948 if (strchr (fmt_str, target_percent) == 0)
9949 len = build_int_cstu (size_type_node, strlen (fmt_str));
9950 /* If the format is "%s" and the first ... argument is a string literal,
9951 we know it too. */
9952 else if (fcode == BUILT_IN_SPRINTF_CHK
9953 && strcmp (fmt_str, target_percent_s) == 0)
9955 tree arg;
9957 if (nargs < 5)
9958 return;
9959 arg = CALL_EXPR_ARG (exp, 4);
9960 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9961 return;
9963 len = c_strlen (arg, 1);
9964 if (!len || ! tree_fits_uhwi_p (len))
9965 return;
9967 else
9968 return;
9970 /* Add one for the terminating nul. */
9971 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9972 check_sizes (OPT_Wstringop_overflow_,
9973 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9976 /* Emit warning if a free is called with address of a variable. */
9978 static void
9979 maybe_emit_free_warning (tree exp)
9981 tree arg = CALL_EXPR_ARG (exp, 0);
9983 STRIP_NOPS (arg);
9984 if (TREE_CODE (arg) != ADDR_EXPR)
9985 return;
9987 arg = get_base_address (TREE_OPERAND (arg, 0));
9988 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9989 return;
9991 if (SSA_VAR_P (arg))
9992 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9993 "%Kattempt to free a non-heap object %qD", exp, arg);
9994 else
9995 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9996 "%Kattempt to free a non-heap object", exp);
9999 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10000 if possible. */
10002 static tree
10003 fold_builtin_object_size (tree ptr, tree ost)
10005 unsigned HOST_WIDE_INT bytes;
10006 int object_size_type;
10008 if (!validate_arg (ptr, POINTER_TYPE)
10009 || !validate_arg (ost, INTEGER_TYPE))
10010 return NULL_TREE;
10012 STRIP_NOPS (ost);
10014 if (TREE_CODE (ost) != INTEGER_CST
10015 || tree_int_cst_sgn (ost) < 0
10016 || compare_tree_int (ost, 3) > 0)
10017 return NULL_TREE;
10019 object_size_type = tree_to_shwi (ost);
10021 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10022 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10023 and (size_t) 0 for types 2 and 3. */
10024 if (TREE_SIDE_EFFECTS (ptr))
10025 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10027 if (TREE_CODE (ptr) == ADDR_EXPR)
10029 compute_builtin_object_size (ptr, object_size_type, &bytes);
10030 if (wi::fits_to_tree_p (bytes, size_type_node))
10031 return build_int_cstu (size_type_node, bytes);
10033 else if (TREE_CODE (ptr) == SSA_NAME)
10035 /* If object size is not known yet, delay folding until
10036 later. Maybe subsequent passes will help determining
10037 it. */
10038 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10039 && wi::fits_to_tree_p (bytes, size_type_node))
10040 return build_int_cstu (size_type_node, bytes);
10043 return NULL_TREE;
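/* For illustration (f is hypothetical): because the argument is not
   evaluated,
     __builtin_object_size (f (), 0)
   folds above straight to (size_t) -1 (or to 0 for types 2 and 3)
   whenever f () has side-effects, without ever calling f.  */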
10046 /* Builtins with folding operations that operate on "..." arguments
10047 need special handling; we need to store the arguments in a convenient
10048 data structure before attempting any folding. Fortunately there are
10049 only a few builtins that fall into this category. FNDECL is the
10050 function, EXP is the CALL_EXPR for the call. */
10052 static tree
10053 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10055 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10056 tree ret = NULL_TREE;
10058 switch (fcode)
10060 case BUILT_IN_FPCLASSIFY:
10061 ret = fold_builtin_fpclassify (loc, args, nargs);
10062 break;
10064 default:
10065 break;
10067 if (ret)
10069 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10070 SET_EXPR_LOCATION (ret, loc);
10071 TREE_NO_WARNING (ret) = 1;
10072 return ret;
10074 return NULL_TREE;
10077 /* Initialize format string characters in the target charset. */
10079 bool
10080 init_target_chars (void)
10082 static bool init;
10083 if (!init)
10085 target_newline = lang_hooks.to_target_charset ('\n');
10086 target_percent = lang_hooks.to_target_charset ('%');
10087 target_c = lang_hooks.to_target_charset ('c');
10088 target_s = lang_hooks.to_target_charset ('s');
10089 if (target_newline == 0 || target_percent == 0 || target_c == 0
10090 || target_s == 0)
10091 return false;
10093 target_percent_c[0] = target_percent;
10094 target_percent_c[1] = target_c;
10095 target_percent_c[2] = '\0';
10097 target_percent_s[0] = target_percent;
10098 target_percent_s[1] = target_s;
10099 target_percent_s[2] = '\0';
10101 target_percent_s_newline[0] = target_percent;
10102 target_percent_s_newline[1] = target_s;
10103 target_percent_s_newline[2] = target_newline;
10104 target_percent_s_newline[3] = '\0';
10106 init = true;
10108 return true;
10111 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10112 and no overflow/underflow occurred. INEXACT is true if M was not
10113 exactly calculated. TYPE is the tree type for the result. This
10114 function assumes that you cleared the MPFR flags and then
10115 calculated M to see if anything subsequently set a flag prior to
10116 entering this function. Return NULL_TREE if any checks fail. */
10118 static tree
10119 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10121 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10122 overflow/underflow occurred. If -frounding-math, proceed iff the
10123 result of calling FUNC was exact. */
10124 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10125 && (!flag_rounding_math || !inexact))
10127 REAL_VALUE_TYPE rr;
10129 real_from_mpfr (&rr, m, type, GMP_RNDN);
10130 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10131 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10132 but the mpfr_t is not, then we underflowed in the
10133 conversion. */
10134 if (real_isfinite (&rr)
10135 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10137 REAL_VALUE_TYPE rmode;
10139 real_convert (&rmode, TYPE_MODE (type), &rr);
10140 /* Proceed iff the specified mode can hold the value. */
10141 if (real_identical (&rmode, &rr))
10142 return build_real (type, rmode);
10145 return NULL_TREE;
10148 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10149 number and no overflow/underflow occurred. INEXACT is true if M
10150 was not exactly calculated. TYPE is the tree type for the result.
10151 This function assumes that you cleared the MPFR flags and then
10152 calculated M to see if anything subsequently set a flag prior to
10153 entering this function. Return NULL_TREE if any checks fail, if
10154 FORCE_CONVERT is true, then bypass the checks. */
10156 static tree
10157 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10159 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10160 overflow/underflow occurred. If -frounding-math, proceed iff the
10161 result of calling FUNC was exact. */
10162 if (force_convert
10163 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10164 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10165 && (!flag_rounding_math || !inexact)))
10167 REAL_VALUE_TYPE re, im;
10169 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10170 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10171 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10172 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10173 but the mpfr_t is not, then we underflowed in the
10174 conversion. */
10175 if (force_convert
10176 || (real_isfinite (&re) && real_isfinite (&im)
10177 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10178 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10180 REAL_VALUE_TYPE re_mode, im_mode;
10182 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10183 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10184 /* Proceed iff the specified mode can hold the value. */
10185 if (force_convert
10186 || (real_identical (&re_mode, &re)
10187 && real_identical (&im_mode, &im)))
10188 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10189 build_real (TREE_TYPE (type), im_mode));
10192 return NULL_TREE;
10195 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10196 the pointer *(ARG_QUO) and return the result. The type is taken
10197 from the type of ARG0 and is used for setting the precision of the
10198 calculation and results. */
10200 static tree
10201 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10203 tree const type = TREE_TYPE (arg0);
10204 tree result = NULL_TREE;
10206 STRIP_NOPS (arg0);
10207 STRIP_NOPS (arg1);
10209 /* To proceed, MPFR must exactly represent the target floating point
10210 format, which only happens when the target base equals two. */
10211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10212 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10213 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10215 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10216 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10218 if (real_isfinite (ra0) && real_isfinite (ra1))
10220 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10221 const int prec = fmt->p;
10222 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10223 tree result_rem;
10224 long integer_quo;
10225 mpfr_t m0, m1;
10227 mpfr_inits2 (prec, m0, m1, NULL);
10228 mpfr_from_real (m0, ra0, GMP_RNDN);
10229 mpfr_from_real (m1, ra1, GMP_RNDN);
10230 mpfr_clear_flags ();
10231 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10232 /* Remquo is independent of the rounding mode, so pass
10233 inexact=0 to do_mpfr_ckconv(). */
10234 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10235 mpfr_clears (m0, m1, NULL);
10236 if (result_rem)
10238 /* MPFR calculates quo in the host's long so it may
10239 return more bits in quo than the target int can hold
10240 if sizeof(host long) > sizeof(target int). This can
10241 happen even for native compilers in LP64 mode. In
10242 these cases, modulo the quo value with the largest
10243 number that the target int can hold while leaving one
10244 bit for the sign. */
10245 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10246 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10248 /* Dereference the quo pointer argument. */
10249 arg_quo = build_fold_indirect_ref (arg_quo);
10250 /* Proceed iff a valid pointer type was passed in. */
10251 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10253 /* Set the value. */
10254 tree result_quo
10255 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10256 build_int_cst (TREE_TYPE (arg_quo),
10257 integer_quo));
10258 TREE_SIDE_EFFECTS (result_quo) = 1;
10259 /* Combine the quo assignment with the rem. */
10260 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10261 result_quo, result_rem));
10266 return result;
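/* Worked example (illustrative): for remquo (5.0, 3.0, &q) the
   quotient 5/3 is rounded to the nearest integer 2, so the call folds
   to the COMPOUND_EXPR (*q = 2, rem) with rem = 5.0 - 2 * 3.0 = -1.0.  */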
10269 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10270 resulting value as a tree with type TYPE. The mpfr precision is
10271 set to the precision of TYPE. We assume that this mpfr function
10272 returns zero if the result could be calculated exactly within the
10273 requested precision. In addition, the integer pointer represented
10274 by ARG_SG will be dereferenced and set to the appropriate signgam
10275 (-1,1) value. */
10277 static tree
10278 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10280 tree result = NULL_TREE;
10282 STRIP_NOPS (arg);
10284 /* To proceed, MPFR must exactly represent the target floating point
10285 format, which only happens when the target base equals two. Also
10286 verify ARG is a constant and that ARG_SG is an int pointer. */
10287 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10288 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10289 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10290 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10292 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10294 /* In addition to NaN and Inf, the argument cannot be zero or a
10295 negative integer. */
10296 if (real_isfinite (ra)
10297 && ra->cl != rvc_zero
10298 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10300 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10301 const int prec = fmt->p;
10302 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10303 int inexact, sg;
10304 mpfr_t m;
10305 tree result_lg;
10307 mpfr_init2 (m, prec);
10308 mpfr_from_real (m, ra, GMP_RNDN);
10309 mpfr_clear_flags ();
10310 inexact = mpfr_lgamma (m, &sg, m, rnd);
10311 result_lg = do_mpfr_ckconv (m, type, inexact);
10312 mpfr_clear (m);
10313 if (result_lg)
10315 tree result_sg;
10317 /* Dereference the arg_sg pointer argument. */
10318 arg_sg = build_fold_indirect_ref (arg_sg);
10319 /* Assign the signgam value into *arg_sg. */
10320 result_sg = fold_build2 (MODIFY_EXPR,
10321 TREE_TYPE (arg_sg), arg_sg,
10322 build_int_cst (TREE_TYPE (arg_sg), sg));
10323 TREE_SIDE_EFFECTS (result_sg) = 1;
10324 /* Combine the signgam assignment with the lgamma result. */
10325 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10326 result_sg, result_lg));
10331 return result;
10334 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10335 mpc function FUNC on it and return the resulting value as a tree
10336 with type TYPE. The mpfr precision is set to the precision of
10337 TYPE. We assume that function FUNC returns zero if the result
10338 could be calculated exactly within the requested precision. If
10339 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10340 in the arguments and/or results. */
10342 tree
10343 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10344 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10346 tree result = NULL_TREE;
10348 STRIP_NOPS (arg0);
10349 STRIP_NOPS (arg1);
10351 /* To proceed, MPFR must exactly represent the target floating point
10352 format, which only happens when the target base equals two. */
10353 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10354 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10355 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10356 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10357 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10359 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10360 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10361 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10362 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10364 if (do_nonfinite
10365 || (real_isfinite (re0) && real_isfinite (im0)
10366 && real_isfinite (re1) && real_isfinite (im1)))
10368 const struct real_format *const fmt =
10369 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10370 const int prec = fmt->p;
10371 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10372 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10373 int inexact;
10374 mpc_t m0, m1;
10376 mpc_init2 (m0, prec);
10377 mpc_init2 (m1, prec);
10378 mpfr_from_real (mpc_realref (m0), re0, rnd);
10379 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10380 mpfr_from_real (mpc_realref (m1), re1, rnd);
10381 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10382 mpfr_clear_flags ();
10383 inexact = func (m0, m0, m1, crnd);
10384 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10385 mpc_clear (m0);
10386 mpc_clear (m1);
10390 return result;
10393 /* A wrapper function for builtin folding that prevents warnings for
10394 "statement without effect" and the like, caused by removing the
10395 call node earlier than the warning is generated. */
10397 tree
10398 fold_call_stmt (gcall *stmt, bool ignore)
10400 tree ret = NULL_TREE;
10401 tree fndecl = gimple_call_fndecl (stmt);
10402 location_t loc = gimple_location (stmt);
10403 if (fndecl
10404 && TREE_CODE (fndecl) == FUNCTION_DECL
10405 && DECL_BUILT_IN (fndecl)
10406 && !gimple_call_va_arg_pack_p (stmt))
10408 int nargs = gimple_call_num_args (stmt);
10409 tree *args = (nargs > 0
10410 ? gimple_call_arg_ptr (stmt, 0)
10411 : &error_mark_node);
10413 if (avoid_folding_inline_builtin (fndecl))
10414 return NULL_TREE;
10415 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10417 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10419 else
10421 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10422 if (ret)
10424 /* Propagate location information from original call to
10425 expansion of builtin. Otherwise things like
10426 maybe_emit_chk_warning, that operate on the expansion
10427 of a builtin, will use the wrong location information. */
10428 if (gimple_has_location (stmt))
10430 tree realret = ret;
10431 if (TREE_CODE (ret) == NOP_EXPR)
10432 realret = TREE_OPERAND (ret, 0);
10433 if (CAN_HAVE_LOCATION_P (realret)
10434 && !EXPR_HAS_LOCATION (realret))
10435 SET_EXPR_LOCATION (realret, loc);
10436 return realret;
10438 return ret;
10442 return NULL_TREE;
10445 /* Look up the function in builtin_decl that corresponds to DECL
10446 and set ASMSPEC as its user assembler name. DECL must be a
10447 function decl that declares a builtin. */
10449 void
10450 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10452 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10453 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10454 && asmspec != 0);
10456 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10457 set_user_assembler_name (builtin, asmspec);
10459 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10460 && INT_TYPE_SIZE < BITS_PER_WORD)
10462 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10463 set_user_assembler_libfunc ("ffs", asmspec);
10464 set_optab_libfunc (ffs_optab, mode, "ffs");
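/* For illustration (hypothetical rename): a declaration such as
     extern int ffs (int) __asm__ ("my_ffs");
   arrives here with ASMSPEC "my_ffs"; in addition to renaming the
   builtin decl, the ffs optab libfunc is redirected so that library
   calls emitted when expanding FFS use the same symbol on targets
   where int is narrower than a word.  */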
10468 /* Return true if DECL is a builtin that expands to a constant or similarly
10469 simple code. */
10470 bool
10471 is_simple_builtin (tree decl)
10473 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10474 switch (DECL_FUNCTION_CODE (decl))
10476 /* Builtins that expand to constants. */
10477 case BUILT_IN_CONSTANT_P:
10478 case BUILT_IN_EXPECT:
10479 case BUILT_IN_OBJECT_SIZE:
10480 case BUILT_IN_UNREACHABLE:
10481 /* Simple register moves or loads from stack. */
10482 case BUILT_IN_ASSUME_ALIGNED:
10483 case BUILT_IN_RETURN_ADDRESS:
10484 case BUILT_IN_EXTRACT_RETURN_ADDR:
10485 case BUILT_IN_FROB_RETURN_ADDR:
10486 case BUILT_IN_RETURN:
10487 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10488 case BUILT_IN_FRAME_ADDRESS:
10489 case BUILT_IN_VA_END:
10490 case BUILT_IN_STACK_SAVE:
10491 case BUILT_IN_STACK_RESTORE:
10492 /* Exception state returns or moves registers around. */
10493 case BUILT_IN_EH_FILTER:
10494 case BUILT_IN_EH_POINTER:
10495 case BUILT_IN_EH_COPY_VALUES:
10496 return true;
10498 default:
10499 return false;
10502 return false;
10505 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10506 most probably expanded inline into reasonably simple code. This is a
10507 superset of is_simple_builtin. */
10508 bool
10509 is_inexpensive_builtin (tree decl)
10511 if (!decl)
10512 return false;
10513 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10514 return true;
10515 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10516 switch (DECL_FUNCTION_CODE (decl))
10518 case BUILT_IN_ABS:
10519 CASE_BUILT_IN_ALLOCA:
10520 case BUILT_IN_BSWAP16:
10521 case BUILT_IN_BSWAP32:
10522 case BUILT_IN_BSWAP64:
10523 case BUILT_IN_CLZ:
10524 case BUILT_IN_CLZIMAX:
10525 case BUILT_IN_CLZL:
10526 case BUILT_IN_CLZLL:
10527 case BUILT_IN_CTZ:
10528 case BUILT_IN_CTZIMAX:
10529 case BUILT_IN_CTZL:
10530 case BUILT_IN_CTZLL:
10531 case BUILT_IN_FFS:
10532 case BUILT_IN_FFSIMAX:
10533 case BUILT_IN_FFSL:
10534 case BUILT_IN_FFSLL:
10535 case BUILT_IN_IMAXABS:
10536 case BUILT_IN_FINITE:
10537 case BUILT_IN_FINITEF:
10538 case BUILT_IN_FINITEL:
10539 case BUILT_IN_FINITED32:
10540 case BUILT_IN_FINITED64:
10541 case BUILT_IN_FINITED128:
10542 case BUILT_IN_FPCLASSIFY:
10543 case BUILT_IN_ISFINITE:
10544 case BUILT_IN_ISINF_SIGN:
10545 case BUILT_IN_ISINF:
10546 case BUILT_IN_ISINFF:
10547 case BUILT_IN_ISINFL:
10548 case BUILT_IN_ISINFD32:
10549 case BUILT_IN_ISINFD64:
10550 case BUILT_IN_ISINFD128:
10551 case BUILT_IN_ISNAN:
10552 case BUILT_IN_ISNANF:
10553 case BUILT_IN_ISNANL:
10554 case BUILT_IN_ISNAND32:
10555 case BUILT_IN_ISNAND64:
10556 case BUILT_IN_ISNAND128:
10557 case BUILT_IN_ISNORMAL:
10558 case BUILT_IN_ISGREATER:
10559 case BUILT_IN_ISGREATEREQUAL:
10560 case BUILT_IN_ISLESS:
10561 case BUILT_IN_ISLESSEQUAL:
10562 case BUILT_IN_ISLESSGREATER:
10563 case BUILT_IN_ISUNORDERED:
10564 case BUILT_IN_VA_ARG_PACK:
10565 case BUILT_IN_VA_ARG_PACK_LEN:
10566 case BUILT_IN_VA_COPY:
10567 case BUILT_IN_TRAP:
10568 case BUILT_IN_SAVEREGS:
10569 case BUILT_IN_POPCOUNTL:
10570 case BUILT_IN_POPCOUNTLL:
10571 case BUILT_IN_POPCOUNTIMAX:
10572 case BUILT_IN_POPCOUNT:
10573 case BUILT_IN_PARITYL:
10574 case BUILT_IN_PARITYLL:
10575 case BUILT_IN_PARITYIMAX:
10576 case BUILT_IN_PARITY:
10577 case BUILT_IN_LABS:
10578 case BUILT_IN_LLABS:
10579 case BUILT_IN_PREFETCH:
10580 case BUILT_IN_ACC_ON_DEVICE:
10581 return true;
10583 default:
10584 return is_simple_builtin (decl);
10587 return false;
10590 /* Return true if T is a constant and the value cast to a target char
10591 can be represented by a host char.
10592 Store the converted char constant in *P if so. */
10594 bool
10595 target_char_cst_p (tree t, char *p)
10597 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10598 return false;
10600 *p = (char)tree_to_uhwi (t);
10601 return true;