* tree-loop-distribution.c (INCLUDE_ALGORITHM): New header file.
[official-gcc.git] / gcc / builtins.c
blob28fc76b5fc0f547262d1203ca211a9843db3100a
1 /* Expand builtin functions.
2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "tree-object-size.h"
50 #include "realmpfr.h"
51 #include "cfgrtl.h"
52 #include "except.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "stmt.h"
56 #include "expr.h"
57 #include "libfuncs.h"
58 #include "output.h"
59 #include "typeclass.h"
60 #include "langhooks.h"
61 #include "value-prof.h"
62 #include "builtins.h"
63 #include "stringpool.h"
64 #include "attribs.h"
65 #include "asan.h"
66 #include "cilk.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
74 struct target_builtins default_target_builtins;
75 #if SWITCHABLE_TARGET
76 struct target_builtins *this_target_builtins = &default_target_builtins;
77 #endif
79 /* Define the names of the builtin function types and codes. */
80 const char *const built_in_class_names[BUILT_IN_LAST]
81 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
84 const char * built_in_names[(int) END_BUILTINS] =
86 #include "builtins.def"
89 /* Setup an array of builtin_info_type, make sure each element decl is
90 initialized to NULL_TREE. */
91 builtin_info_type builtin_info[(int)END_BUILTINS];
93 /* Non-zero if __builtin_constant_p should be folded right away. */
94 bool force_folding_builtin_constant_p;
96 static rtx c_readstr (const char *, scalar_int_mode);
97 static int target_char_cast (tree, char *);
98 static rtx get_memory_rtx (tree, tree);
99 static int apply_args_size (void);
100 static int apply_result_size (void);
101 static rtx result_vector (int, rtx);
102 static void expand_builtin_prefetch (tree);
103 static rtx expand_builtin_apply_args (void);
104 static rtx expand_builtin_apply_args_1 (void);
105 static rtx expand_builtin_apply (rtx, rtx, rtx);
106 static void expand_builtin_return (rtx);
107 static enum type_class type_to_class (tree);
108 static rtx expand_builtin_classify_type (tree);
109 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
110 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
111 static rtx expand_builtin_interclass_mathfn (tree, rtx);
112 static rtx expand_builtin_sincos (tree);
113 static rtx expand_builtin_cexpi (tree, rtx);
114 static rtx expand_builtin_int_roundingfn (tree, rtx);
115 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
116 static rtx expand_builtin_next_arg (void);
117 static rtx expand_builtin_va_start (tree);
118 static rtx expand_builtin_va_end (tree);
119 static rtx expand_builtin_va_copy (tree);
120 static rtx expand_builtin_strcmp (tree, rtx);
121 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
122 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
123 static rtx expand_builtin_memchr (tree, rtx);
124 static rtx expand_builtin_memcpy (tree, rtx);
125 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
126 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
127 rtx target, tree exp, int endp);
128 static rtx expand_builtin_memmove (tree, rtx);
129 static rtx expand_builtin_mempcpy (tree, rtx);
130 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
131 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
132 static rtx expand_builtin_strcat (tree, rtx);
133 static rtx expand_builtin_strcpy (tree, rtx);
134 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
135 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
136 static rtx expand_builtin_stpncpy (tree, rtx);
137 static rtx expand_builtin_strncat (tree, rtx);
138 static rtx expand_builtin_strncpy (tree, rtx);
139 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
140 static rtx expand_builtin_memset (tree, rtx, machine_mode);
141 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
143 static rtx expand_builtin_bzero (tree);
144 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
145 static rtx expand_builtin_alloca (tree);
146 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
147 static rtx expand_builtin_frame_address (tree, tree);
148 static tree stabilize_va_list_loc (location_t, tree, int);
149 static rtx expand_builtin_expect (tree, rtx);
150 static tree fold_builtin_constant_p (tree);
151 static tree fold_builtin_classify_type (tree);
152 static tree fold_builtin_strlen (location_t, tree, tree);
153 static tree fold_builtin_inf (location_t, tree, int);
154 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
155 static bool validate_arg (const_tree, enum tree_code code);
156 static rtx expand_builtin_fabs (tree, rtx, rtx);
157 static rtx expand_builtin_signbit (tree, rtx);
158 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
159 static tree fold_builtin_isascii (location_t, tree);
160 static tree fold_builtin_toascii (location_t, tree);
161 static tree fold_builtin_isdigit (location_t, tree);
162 static tree fold_builtin_fabs (location_t, tree, tree);
163 static tree fold_builtin_abs (location_t, tree, tree);
164 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
165 enum tree_code);
166 static tree fold_builtin_0 (location_t, tree);
167 static tree fold_builtin_1 (location_t, tree, tree);
168 static tree fold_builtin_2 (location_t, tree, tree, tree);
169 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree);
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
195 /* Return true if NAME starts with __builtin_ or __sync_. */
197 static bool
198 is_builtin_name (const char *name)
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 if (flag_cilkplus
207 && (!strcmp (name, "__cilkrts_detach")
208 || !strcmp (name, "__cilkrts_pop_frame")))
209 return true;
210 return false;
214 /* Return true if DECL is a function symbol representing a built-in. */
216 bool
217 is_builtin_fn (tree decl)
219 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
222 /* Return true if NODE should be considered for inline expansion regardless
223 of the optimization level. This means whenever a function is invoked with
224 its "internal" name, which normally contains the prefix "__builtin". */
226 bool
227 called_as_built_in (tree node)
229 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
230 we want the name used to call the function, not the name it
231 will have. */
232 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
233 return is_builtin_name (name);
236 /* Compute values M and N such that M divides (address of EXP - N) and such
237 that N < M. If these numbers can be determined, store M in alignp and N in
238 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
239 *alignp and any bit-offset to *bitposp.
241 Note that the address (and thus the alignment) computed here is based
242 on the address to which a symbol resolves, whereas DECL_ALIGN is based
243 on the address at which an object is actually located. These two
244 addresses are not always the same. For example, on ARM targets,
245 the address &foo of a Thumb function foo() has the lowest bit set,
246 whereas foo() itself starts on an even address.
248 If ADDR_P is true we are taking the address of the memory reference EXP
249 and thus cannot rely on the access taking place. */
251 static bool
252 get_object_alignment_2 (tree exp, unsigned int *alignp,
253 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
255 HOST_WIDE_INT bitsize, bitpos;
256 tree offset;
257 machine_mode mode;
258 int unsignedp, reversep, volatilep;
259 unsigned int align = BITS_PER_UNIT;
260 bool known_alignment = false;
262 /* Get the innermost object and the constant (bitpos) and possibly
263 variable (offset) offset of the access. */
264 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
265 &unsignedp, &reversep, &volatilep);
267 /* Extract alignment information from the innermost object and
268 possibly adjust bitpos and offset. */
269 if (TREE_CODE (exp) == FUNCTION_DECL)
271 /* Function addresses can encode extra information besides their
272 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
273 allows the low bit to be used as a virtual bit, we know
274 that the address itself must be at least 2-byte aligned. */
275 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
276 align = 2 * BITS_PER_UNIT;
278 else if (TREE_CODE (exp) == LABEL_DECL)
280 else if (TREE_CODE (exp) == CONST_DECL)
282 /* The alignment of a CONST_DECL is determined by its initializer. */
283 exp = DECL_INITIAL (exp);
284 align = TYPE_ALIGN (TREE_TYPE (exp));
285 if (CONSTANT_CLASS_P (exp))
286 align = targetm.constant_alignment (exp, align);
288 known_alignment = true;
290 else if (DECL_P (exp))
292 align = DECL_ALIGN (exp);
293 known_alignment = true;
295 else if (TREE_CODE (exp) == INDIRECT_REF
296 || TREE_CODE (exp) == MEM_REF
297 || TREE_CODE (exp) == TARGET_MEM_REF)
299 tree addr = TREE_OPERAND (exp, 0);
300 unsigned ptr_align;
301 unsigned HOST_WIDE_INT ptr_bitpos;
302 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
304 /* If the address is explicitely aligned, handle that. */
305 if (TREE_CODE (addr) == BIT_AND_EXPR
306 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
308 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
309 ptr_bitmask *= BITS_PER_UNIT;
310 align = least_bit_hwi (ptr_bitmask);
311 addr = TREE_OPERAND (addr, 0);
314 known_alignment
315 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
316 align = MAX (ptr_align, align);
318 /* Re-apply explicit alignment to the bitpos. */
319 ptr_bitpos &= ptr_bitmask;
321 /* The alignment of the pointer operand in a TARGET_MEM_REF
322 has to take the variable offset parts into account. */
323 if (TREE_CODE (exp) == TARGET_MEM_REF)
325 if (TMR_INDEX (exp))
327 unsigned HOST_WIDE_INT step = 1;
328 if (TMR_STEP (exp))
329 step = TREE_INT_CST_LOW (TMR_STEP (exp));
330 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
332 if (TMR_INDEX2 (exp))
333 align = BITS_PER_UNIT;
334 known_alignment = false;
337 /* When EXP is an actual memory reference then we can use
338 TYPE_ALIGN of a pointer indirection to derive alignment.
339 Do so only if get_pointer_alignment_1 did not reveal absolute
340 alignment knowledge and if using that alignment would
341 improve the situation. */
342 unsigned int talign;
343 if (!addr_p && !known_alignment
344 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
345 && talign > align)
346 align = talign;
347 else
349 /* Else adjust bitpos accordingly. */
350 bitpos += ptr_bitpos;
351 if (TREE_CODE (exp) == MEM_REF
352 || TREE_CODE (exp) == TARGET_MEM_REF)
353 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
356 else if (TREE_CODE (exp) == STRING_CST)
358 /* STRING_CST are the only constant objects we allow to be not
359 wrapped inside a CONST_DECL. */
360 align = TYPE_ALIGN (TREE_TYPE (exp));
361 if (CONSTANT_CLASS_P (exp))
362 align = targetm.constant_alignment (exp, align);
364 known_alignment = true;
367 /* If there is a non-constant offset part extract the maximum
368 alignment that can prevail. */
369 if (offset)
371 unsigned int trailing_zeros = tree_ctz (offset);
372 if (trailing_zeros < HOST_BITS_PER_INT)
374 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
375 if (inner)
376 align = MIN (align, inner);
380 *alignp = align;
381 *bitposp = bitpos & (*alignp - 1);
382 return known_alignment;
385 /* For a memory reference expression EXP compute values M and N such that M
386 divides (&EXP - N) and such that N < M. If these numbers can be determined,
387 store M in alignp and N in *BITPOSP and return true. Otherwise return false
388 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
390 bool
391 get_object_alignment_1 (tree exp, unsigned int *alignp,
392 unsigned HOST_WIDE_INT *bitposp)
394 return get_object_alignment_2 (exp, alignp, bitposp, false);
397 /* Return the alignment in bits of EXP, an object. */
399 unsigned int
400 get_object_alignment (tree exp)
402 unsigned HOST_WIDE_INT bitpos = 0;
403 unsigned int align;
405 get_object_alignment_1 (exp, &align, &bitpos);
407 /* align and bitpos now specify known low bits of the pointer.
408 ptr & (align - 1) == bitpos. */
410 if (bitpos != 0)
411 align = least_bit_hwi (bitpos);
412 return align;
415 /* For a pointer valued expression EXP compute values M and N such that M
416 divides (EXP - N) and such that N < M. If these numbers can be determined,
417 store M in alignp and N in *BITPOSP and return true. Return false if
418 the results are just a conservative approximation.
420 If EXP is not a pointer, false is returned too. */
422 bool
423 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
424 unsigned HOST_WIDE_INT *bitposp)
426 STRIP_NOPS (exp);
428 if (TREE_CODE (exp) == ADDR_EXPR)
429 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
430 alignp, bitposp, true);
431 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
433 unsigned int align;
434 unsigned HOST_WIDE_INT bitpos;
435 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
436 &align, &bitpos);
437 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
438 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
439 else
441 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
442 if (trailing_zeros < HOST_BITS_PER_INT)
444 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
445 if (inner)
446 align = MIN (align, inner);
449 *alignp = align;
450 *bitposp = bitpos & (align - 1);
451 return res;
453 else if (TREE_CODE (exp) == SSA_NAME
454 && POINTER_TYPE_P (TREE_TYPE (exp)))
456 unsigned int ptr_align, ptr_misalign;
457 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
459 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
461 *bitposp = ptr_misalign * BITS_PER_UNIT;
462 *alignp = ptr_align * BITS_PER_UNIT;
463 /* Make sure to return a sensible alignment when the multiplication
464 by BITS_PER_UNIT overflowed. */
465 if (*alignp == 0)
466 *alignp = 1u << (HOST_BITS_PER_INT - 1);
467 /* We cannot really tell whether this result is an approximation. */
468 return false;
470 else
472 *bitposp = 0;
473 *alignp = BITS_PER_UNIT;
474 return false;
477 else if (TREE_CODE (exp) == INTEGER_CST)
479 *alignp = BIGGEST_ALIGNMENT;
480 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
481 & (BIGGEST_ALIGNMENT - 1));
482 return true;
485 *bitposp = 0;
486 *alignp = BITS_PER_UNIT;
487 return false;
490 /* Return the alignment in bits of EXP, a pointer valued expression.
491 The alignment returned is, by default, the alignment of the thing that
492 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
494 Otherwise, look at the expression to see if we can do better, i.e., if the
495 expression is actually pointing at an object whose alignment is tighter. */
497 unsigned int
498 get_pointer_alignment (tree exp)
500 unsigned HOST_WIDE_INT bitpos = 0;
501 unsigned int align;
503 get_pointer_alignment_1 (exp, &align, &bitpos);
505 /* align and bitpos now specify known low bits of the pointer.
506 ptr & (align - 1) == bitpos. */
508 if (bitpos != 0)
509 align = least_bit_hwi (bitpos);
511 return align;
514 /* Return the number of non-zero elements in the sequence
515 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
516 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
518 static unsigned
519 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
521 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
523 unsigned n;
525 if (eltsize == 1)
527 /* Optimize the common case of plain char. */
528 for (n = 0; n < maxelts; n++)
530 const char *elt = (const char*) ptr + n;
531 if (!*elt)
532 break;
535 else
537 for (n = 0; n < maxelts; n++)
539 const char *elt = (const char*) ptr + n * eltsize;
540 if (!memcmp (elt, "\0\0\0\0", eltsize))
541 break;
544 return n;
547 /* Compute the length of a null-terminated character string or wide
548 character string handling character sizes of 1, 2, and 4 bytes.
549 TREE_STRING_LENGTH is not the right way because it evaluates to
550 the size of the character array in bytes (as opposed to characters)
551 and because it can contain a zero byte in the middle.
553 ONLY_VALUE should be nonzero if the result is not going to be emitted
554 into the instruction stream and zero if it is going to be expanded.
555 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
556 is returned, otherwise NULL, since
557 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
558 evaluate the side-effects.
560 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
561 accesses. Note that this implies the result is not going to be emitted
562 into the instruction stream.
564 The value returned is of type `ssizetype'.
566 Unfortunately, string_constant can't access the values of const char
567 arrays with initializers, so neither can we do so here. */
569 tree
570 c_strlen (tree src, int only_value)
572 STRIP_NOPS (src);
573 if (TREE_CODE (src) == COND_EXPR
574 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
576 tree len1, len2;
578 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
579 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
580 if (tree_int_cst_equal (len1, len2))
581 return len1;
584 if (TREE_CODE (src) == COMPOUND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586 return c_strlen (TREE_OPERAND (src, 1), only_value);
588 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
590 /* Offset from the beginning of the string in bytes. */
591 tree byteoff;
592 src = string_constant (src, &byteoff);
593 if (src == 0)
594 return NULL_TREE;
596 /* Determine the size of the string element. */
597 unsigned eltsize
598 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
600 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
601 length of SRC. */
602 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
604 /* PTR can point to the byte representation of any string type, including
605 char* and wchar_t*. */
606 const char *ptr = TREE_STRING_POINTER (src);
608 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
610 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
611 compute the offset to the following null if we don't know where to
612 start searching for it. */
613 if (string_length (ptr, eltsize, maxelts) < maxelts)
615 /* Return when an embedded null character is found. */
616 return NULL_TREE;
619 /* We don't know the starting offset, but we do know that the string
620 has no internal zero bytes. We can assume that the offset falls
621 within the bounds of the string; otherwise, the programmer deserves
622 what he gets. Subtract the offset from the length of the string,
623 and return that. This would perhaps not be valid if we were dealing
624 with named arrays in addition to literal string constants. */
626 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
629 /* Offset from the beginning of the string in elements. */
630 HOST_WIDE_INT eltoff;
632 /* We have a known offset into the string. Start searching there for
633 a null character if we can represent it as a single HOST_WIDE_INT. */
634 if (byteoff == 0)
635 eltoff = 0;
636 else if (! tree_fits_shwi_p (byteoff))
637 eltoff = -1;
638 else
639 eltoff = tree_to_shwi (byteoff) / eltsize;
641 /* If the offset is known to be out of bounds, warn, and call strlen at
642 runtime. */
643 if (eltoff < 0 || eltoff > maxelts)
645 /* Suppress multiple warnings for propagated constant strings. */
646 if (only_value != 2
647 && !TREE_NO_WARNING (src))
649 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
650 eltoff);
651 TREE_NO_WARNING (src) = 1;
653 return NULL_TREE;
656 /* Use strlen to search for the first zero byte. Since any strings
657 constructed with build_string will have nulls appended, we win even
658 if we get handed something like (char[4])"abcd".
660 Since ELTOFF is our starting index into the string, no further
661 calculation is needed. */
662 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
663 maxelts - eltoff);
665 return ssize_int (len);
668 /* Return a constant integer corresponding to target reading
669 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
671 static rtx
672 c_readstr (const char *str, scalar_int_mode mode)
674 HOST_WIDE_INT ch;
675 unsigned int i, j;
676 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
678 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
679 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
680 / HOST_BITS_PER_WIDE_INT;
682 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
683 for (i = 0; i < len; i++)
684 tmp[i] = 0;
686 ch = 1;
687 for (i = 0; i < GET_MODE_SIZE (mode); i++)
689 j = i;
690 if (WORDS_BIG_ENDIAN)
691 j = GET_MODE_SIZE (mode) - i - 1;
692 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
693 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
694 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
695 j *= BITS_PER_UNIT;
697 if (ch)
698 ch = (unsigned char) str[i];
699 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
702 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
703 return immed_wide_int_const (c, mode);
706 /* Cast a target constant CST to target CHAR and if that value fits into
707 host char type, return zero and put that value into variable pointed to by
708 P. */
710 static int
711 target_char_cast (tree cst, char *p)
713 unsigned HOST_WIDE_INT val, hostval;
715 if (TREE_CODE (cst) != INTEGER_CST
716 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
717 return 1;
719 /* Do not care if it fits or not right here. */
720 val = TREE_INT_CST_LOW (cst);
722 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
723 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
725 hostval = val;
726 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
727 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
729 if (val != hostval)
730 return 1;
732 *p = hostval;
733 return 0;
736 /* Similar to save_expr, but assumes that arbitrary code is not executed
737 in between the multiple evaluations. In particular, we assume that a
738 non-addressable local variable will not be modified. */
740 static tree
741 builtin_save_expr (tree exp)
743 if (TREE_CODE (exp) == SSA_NAME
744 || (TREE_ADDRESSABLE (exp) == 0
745 && (TREE_CODE (exp) == PARM_DECL
746 || (VAR_P (exp) && !TREE_STATIC (exp)))))
747 return exp;
749 return save_expr (exp);
752 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
753 times to get the address of either a higher stack frame, or a return
754 address located within it (depending on FNDECL_CODE). */
756 static rtx
757 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
759 int i;
760 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
761 if (tem == NULL_RTX)
763 /* For a zero count with __builtin_return_address, we don't care what
764 frame address we return, because target-specific definitions will
765 override us. Therefore frame pointer elimination is OK, and using
766 the soft frame pointer is OK.
768 For a nonzero count, or a zero count with __builtin_frame_address,
769 we require a stable offset from the current frame pointer to the
770 previous one, so we must use the hard frame pointer, and
771 we must disable frame pointer elimination. */
772 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
773 tem = frame_pointer_rtx;
774 else
776 tem = hard_frame_pointer_rtx;
778 /* Tell reload not to eliminate the frame pointer. */
779 crtl->accesses_prior_frames = 1;
783 if (count > 0)
784 SETUP_FRAME_ADDRESSES ();
786 /* On the SPARC, the return address is not in the frame, it is in a
787 register. There is no way to access it off of the current frame
788 pointer, but it can be accessed off the previous frame pointer by
789 reading the value from the register window save area. */
790 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
791 count--;
793 /* Scan back COUNT frames to the specified frame. */
794 for (i = 0; i < count; i++)
796 /* Assume the dynamic chain pointer is in the word that the
797 frame address points to, unless otherwise specified. */
798 tem = DYNAMIC_CHAIN_ADDRESS (tem);
799 tem = memory_address (Pmode, tem);
800 tem = gen_frame_mem (Pmode, tem);
801 tem = copy_to_reg (tem);
804 /* For __builtin_frame_address, return what we've got. But, on
805 the SPARC for example, we may have to add a bias. */
806 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
807 return FRAME_ADDR_RTX (tem);
809 /* For __builtin_return_address, get the return address from that frame. */
810 #ifdef RETURN_ADDR_RTX
811 tem = RETURN_ADDR_RTX (count, tem);
812 #else
813 tem = memory_address (Pmode,
814 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
815 tem = gen_frame_mem (Pmode, tem);
816 #endif
817 return tem;
820 /* Alias set used for setjmp buffer. */
821 static alias_set_type setjmp_alias_set = -1;
823 /* Construct the leading half of a __builtin_setjmp call. Control will
824 return to RECEIVER_LABEL. This is also called directly by the SJLJ
825 exception handling code. */
827 void
828 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
830 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
831 rtx stack_save;
832 rtx mem;
834 if (setjmp_alias_set == -1)
835 setjmp_alias_set = new_alias_set ();
837 buf_addr = convert_memory_address (Pmode, buf_addr);
839 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
841 /* We store the frame pointer and the address of receiver_label in
842 the buffer and use the rest of it for the stack save area, which
843 is machine-dependent. */
845 mem = gen_rtx_MEM (Pmode, buf_addr);
846 set_mem_alias_set (mem, setjmp_alias_set);
847 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
849 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
850 GET_MODE_SIZE (Pmode))),
851 set_mem_alias_set (mem, setjmp_alias_set);
853 emit_move_insn (validize_mem (mem),
854 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
856 stack_save = gen_rtx_MEM (sa_mode,
857 plus_constant (Pmode, buf_addr,
858 2 * GET_MODE_SIZE (Pmode)));
859 set_mem_alias_set (stack_save, setjmp_alias_set);
860 emit_stack_save (SAVE_NONLOCAL, &stack_save);
862 /* If there is further processing to do, do it. */
863 if (targetm.have_builtin_setjmp_setup ())
864 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
866 /* We have a nonlocal label. */
867 cfun->has_nonlocal_label = 1;
870 /* Construct the trailing part of a __builtin_setjmp call. This is
871 also called directly by the SJLJ exception handling code.
872 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
874 void
875 expand_builtin_setjmp_receiver (rtx receiver_label)
877 rtx chain;
879 /* Mark the FP as used when we get here, so we have to make sure it's
880 marked as used by this function. */
881 emit_use (hard_frame_pointer_rtx);
883 /* Mark the static chain as clobbered here so life information
884 doesn't get messed up for it. */
885 chain = targetm.calls.static_chain (current_function_decl, true);
886 if (chain && REG_P (chain))
887 emit_clobber (chain);
889 /* Now put in the code to restore the frame pointer, and argument
890 pointer, if needed. */
891 if (! targetm.have_nonlocal_goto ())
893 /* First adjust our frame pointer to its actual value. It was
894 previously set to the start of the virtual area corresponding to
895 the stacked variables when we branched here and now needs to be
896 adjusted to the actual hardware fp value.
898 Assignments to virtual registers are converted by
899 instantiate_virtual_regs into the corresponding assignment
900 to the underlying register (fp in this case) that makes
901 the original assignment true.
902 So the following insn will actually be decrementing fp by
903 STARTING_FRAME_OFFSET. */
904 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
906 /* Restoring the frame pointer also modifies the hard frame pointer.
907 Mark it used (so that the previous assignment remains live once
908 the frame pointer is eliminated) and clobbered (to represent the
909 implicit update from the assignment). */
910 emit_use (hard_frame_pointer_rtx);
911 emit_clobber (hard_frame_pointer_rtx);
914 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
916 /* If the argument pointer can be eliminated in favor of the
917 frame pointer, we don't need to restore it. We assume here
918 that if such an elimination is present, it can always be used.
919 This is the case on all known machines; if we don't make this
920 assumption, we do unnecessary saving on many machines. */
921 size_t i;
922 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
924 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
925 if (elim_regs[i].from == ARG_POINTER_REGNUM
926 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
927 break;
929 if (i == ARRAY_SIZE (elim_regs))
931 /* Now restore our arg pointer from the address at which it
932 was saved in our stack frame. */
933 emit_move_insn (crtl->args.internal_arg_pointer,
934 copy_to_reg (get_arg_pointer_save_area ()));
938 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
939 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
940 else if (targetm.have_nonlocal_goto_receiver ())
941 emit_insn (targetm.gen_nonlocal_goto_receiver ());
942 else
943 { /* Nothing */ }
945 /* We must not allow the code we just generated to be reordered by
946 scheduling. Specifically, the update of the frame pointer must
947 happen immediately, not later. */
948 emit_insn (gen_blockage ());
951 /* __builtin_longjmp is passed a pointer to an array of five words (not
952 all will be used on all machines). It operates similarly to the C
953 library function of the same name, but is more efficient. Much of
954 the code below is copied from the handling of non-local gotos. */
956 static void
957 expand_builtin_longjmp (rtx buf_addr, rtx value)
959 rtx fp, lab, stack;
960 rtx_insn *insn, *last;
961 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
963 /* DRAP is needed for stack realign if longjmp is expanded to current
964 function */
965 if (SUPPORTS_STACK_ALIGNMENT)
966 crtl->need_drap = true;
968 if (setjmp_alias_set == -1)
969 setjmp_alias_set = new_alias_set ();
971 buf_addr = convert_memory_address (Pmode, buf_addr);
973 buf_addr = force_reg (Pmode, buf_addr);
975 /* We require that the user must pass a second argument of 1, because
976 that is what builtin_setjmp will return. */
977 gcc_assert (value == const1_rtx);
979 last = get_last_insn ();
980 if (targetm.have_builtin_longjmp ())
981 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
982 else
984 fp = gen_rtx_MEM (Pmode, buf_addr);
985 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
986 GET_MODE_SIZE (Pmode)));
988 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
989 2 * GET_MODE_SIZE (Pmode)));
990 set_mem_alias_set (fp, setjmp_alias_set);
991 set_mem_alias_set (lab, setjmp_alias_set);
992 set_mem_alias_set (stack, setjmp_alias_set);
994 /* Pick up FP, label, and SP from the block and jump. This code is
995 from expand_goto in stmt.c; see there for detailed comments. */
996 if (targetm.have_nonlocal_goto ())
997 /* We have to pass a value to the nonlocal_goto pattern that will
998 get copied into the static_chain pointer, but it does not matter
999 what that value is, because builtin_setjmp does not use it. */
1000 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1001 else
1003 lab = copy_to_reg (lab);
1005 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1006 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1008 emit_move_insn (hard_frame_pointer_rtx, fp);
1009 emit_stack_restore (SAVE_NONLOCAL, stack);
1011 emit_use (hard_frame_pointer_rtx);
1012 emit_use (stack_pointer_rtx);
1013 emit_indirect_jump (lab);
1017 /* Search backwards and mark the jump insn as a non-local goto.
1018 Note that this precludes the use of __builtin_longjmp to a
1019 __builtin_setjmp target in the same function. However, we've
1020 already cautioned the user that these functions are for
1021 internal exception handling use only. */
1022 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1024 gcc_assert (insn != last);
1026 if (JUMP_P (insn))
1028 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1029 break;
1031 else if (CALL_P (insn))
1032 break;
1036 static inline bool
1037 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1039 return (iter->i < iter->n);
1042 /* This function validates the types of a function call argument list
1043 against a specified list of tree_codes. If the last specifier is a 0,
1044 that represents an ellipsis, otherwise the last specifier must be a
1045 VOID_TYPE. */
1047 static bool
1048 validate_arglist (const_tree callexpr, ...)
1050 enum tree_code code;
1051 bool res = 0;
1052 va_list ap;
1053 const_call_expr_arg_iterator iter;
1054 const_tree arg;
1056 va_start (ap, callexpr);
1057 init_const_call_expr_arg_iterator (callexpr, &iter);
1059 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1060 tree fn = CALL_EXPR_FN (callexpr);
1061 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1063 for (unsigned argno = 1; ; ++argno)
1065 code = (enum tree_code) va_arg (ap, int);
1067 switch (code)
1069 case 0:
1070 /* This signifies an ellipses, any further arguments are all ok. */
1071 res = true;
1072 goto end;
1073 case VOID_TYPE:
1074 /* This signifies an endlink, if no arguments remain, return
1075 true, otherwise return false. */
1076 res = !more_const_call_expr_args_p (&iter);
1077 goto end;
1078 case POINTER_TYPE:
1079 /* The actual argument must be nonnull when either the whole
1080 called function has been declared nonnull, or when the formal
1081 argument corresponding to the actual argument has been. */
1082 if (argmap
1083 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1085 arg = next_const_call_expr_arg (&iter);
1086 if (!validate_arg (arg, code) || integer_zerop (arg))
1087 goto end;
1088 break;
1090 /* FALLTHRU */
1091 default:
1092 /* If no parameters remain or the parameter's code does not
1093 match the specified code, return false. Otherwise continue
1094 checking any remaining arguments. */
1095 arg = next_const_call_expr_arg (&iter);
1096 if (!validate_arg (arg, code))
1097 goto end;
1098 break;
1102 /* We need gotos here since we can only have one VA_CLOSE in a
1103 function. */
1104 end: ;
1105 va_end (ap);
1107 BITMAP_FREE (argmap);
1109 return res;
1112 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1113 and the address of the save area. */
1115 static rtx
1116 expand_builtin_nonlocal_goto (tree exp)
1118 tree t_label, t_save_area;
1119 rtx r_label, r_save_area, r_fp, r_sp;
1120 rtx_insn *insn;
1122 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1123 return NULL_RTX;
1125 t_label = CALL_EXPR_ARG (exp, 0);
1126 t_save_area = CALL_EXPR_ARG (exp, 1);
1128 r_label = expand_normal (t_label);
1129 r_label = convert_memory_address (Pmode, r_label);
1130 r_save_area = expand_normal (t_save_area);
1131 r_save_area = convert_memory_address (Pmode, r_save_area);
1132 /* Copy the address of the save location to a register just in case it was
1133 based on the frame pointer. */
1134 r_save_area = copy_to_reg (r_save_area);
1135 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1136 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1137 plus_constant (Pmode, r_save_area,
1138 GET_MODE_SIZE (Pmode)));
1140 crtl->has_nonlocal_goto = 1;
1142 /* ??? We no longer need to pass the static chain value, afaik. */
1143 if (targetm.have_nonlocal_goto ())
1144 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1145 else
1147 r_label = copy_to_reg (r_label);
1149 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1150 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1152 /* Restore frame pointer for containing function. */
1153 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1154 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1156 /* USE of hard_frame_pointer_rtx added for consistency;
1157 not clear if really needed. */
1158 emit_use (hard_frame_pointer_rtx);
1159 emit_use (stack_pointer_rtx);
1161 /* If the architecture is using a GP register, we must
1162 conservatively assume that the target function makes use of it.
1163 The prologue of functions with nonlocal gotos must therefore
1164 initialize the GP register to the appropriate value, and we
1165 must then make sure that this value is live at the point
1166 of the jump. (Note that this doesn't necessarily apply
1167 to targets with a nonlocal_goto pattern; they are free
1168 to implement it in their own way. Note also that this is
1169 a no-op if the GP register is a global invariant.) */
1170 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1171 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1172 emit_use (pic_offset_table_rtx);
1174 emit_indirect_jump (r_label);
1177 /* Search backwards to the jump insn and mark it as a
1178 non-local goto. */
1179 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1181 if (JUMP_P (insn))
1183 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1184 break;
1186 else if (CALL_P (insn))
1187 break;
1190 return const0_rtx;
1193 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1194 (not all will be used on all machines) that was passed to __builtin_setjmp.
1195 It updates the stack pointer in that block to the current value. This is
1196 also called directly by the SJLJ exception handling code. */
1198 void
1199 expand_builtin_update_setjmp_buf (rtx buf_addr)
1201 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1202 buf_addr = convert_memory_address (Pmode, buf_addr);
1203 rtx stack_save
1204 = gen_rtx_MEM (sa_mode,
1205 memory_address
1206 (sa_mode,
1207 plus_constant (Pmode, buf_addr,
1208 2 * GET_MODE_SIZE (Pmode))));
1210 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1213 /* Expand a call to __builtin_prefetch. For a target that does not support
1214 data prefetch, evaluate the memory address argument in case it has side
1215 effects. */
1217 static void
1218 expand_builtin_prefetch (tree exp)
1220 tree arg0, arg1, arg2;
1221 int nargs;
1222 rtx op0, op1, op2;
1224 if (!validate_arglist (exp, POINTER_TYPE, 0))
1225 return;
1227 arg0 = CALL_EXPR_ARG (exp, 0);
1229 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1230 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1231 locality). */
1232 nargs = call_expr_nargs (exp);
1233 if (nargs > 1)
1234 arg1 = CALL_EXPR_ARG (exp, 1);
1235 else
1236 arg1 = integer_zero_node;
1237 if (nargs > 2)
1238 arg2 = CALL_EXPR_ARG (exp, 2);
1239 else
1240 arg2 = integer_three_node;
1242 /* Argument 0 is an address. */
1243 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1245 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1246 if (TREE_CODE (arg1) != INTEGER_CST)
1248 error ("second argument to %<__builtin_prefetch%> must be a constant");
1249 arg1 = integer_zero_node;
1251 op1 = expand_normal (arg1);
1252 /* Argument 1 must be either zero or one. */
1253 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1255 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1256 " using zero");
1257 op1 = const0_rtx;
1260 /* Argument 2 (locality) must be a compile-time constant int. */
1261 if (TREE_CODE (arg2) != INTEGER_CST)
1263 error ("third argument to %<__builtin_prefetch%> must be a constant");
1264 arg2 = integer_zero_node;
1266 op2 = expand_normal (arg2);
1267 /* Argument 2 must be 0, 1, 2, or 3. */
1268 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1270 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1271 op2 = const0_rtx;
1274 if (targetm.have_prefetch ())
1276 struct expand_operand ops[3];
1278 create_address_operand (&ops[0], op0);
1279 create_integer_operand (&ops[1], INTVAL (op1));
1280 create_integer_operand (&ops[2], INTVAL (op2));
1281 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1282 return;
1285 /* Don't do anything with direct references to volatile memory, but
1286 generate code to handle other side effects. */
1287 if (!MEM_P (op0) && side_effects_p (op0))
1288 emit_insn (op0);
1291 /* Get a MEM rtx for expression EXP which is the address of an operand
1292 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1293 the maximum length of the block of memory that might be accessed or
1294 NULL if unknown. */
1296 static rtx
1297 get_memory_rtx (tree exp, tree len)
1299 tree orig_exp = exp;
1300 rtx addr, mem;
1302 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1303 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1304 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1305 exp = TREE_OPERAND (exp, 0);
1307 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1308 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1310 /* Get an expression we can use to find the attributes to assign to MEM.
1311 First remove any nops. */
1312 while (CONVERT_EXPR_P (exp)
1313 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1314 exp = TREE_OPERAND (exp, 0);
1316 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1317 (as builtin stringops may alias with anything). */
1318 exp = fold_build2 (MEM_REF,
1319 build_array_type (char_type_node,
1320 build_range_type (sizetype,
1321 size_one_node, len)),
1322 exp, build_int_cst (ptr_type_node, 0));
1324 /* If the MEM_REF has no acceptable address, try to get the base object
1325 from the original address we got, and build an all-aliasing
1326 unknown-sized access to that one. */
1327 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1328 set_mem_attributes (mem, exp, 0);
1329 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1330 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1331 0))))
1333 exp = build_fold_addr_expr (exp);
1334 exp = fold_build2 (MEM_REF,
1335 build_array_type (char_type_node,
1336 build_range_type (sizetype,
1337 size_zero_node,
1338 NULL)),
1339 exp, build_int_cst (ptr_type_node, 0));
1340 set_mem_attributes (mem, exp, 0);
1342 set_mem_alias_set (mem, 0);
1343 return mem;
1346 /* Built-in functions to perform an untyped call and return. */
1348 #define apply_args_mode \
1349 (this_target_builtins->x_apply_args_mode)
1350 #define apply_result_mode \
1351 (this_target_builtins->x_apply_result_mode)
1353 /* Return the size required for the block returned by __builtin_apply_args,
1354 and initialize apply_args_mode. */
1356 static int
1357 apply_args_size (void)
1359 static int size = -1;
1360 int align;
1361 unsigned int regno;
1362 machine_mode mode;
1364 /* The values computed by this function never change. */
1365 if (size < 0)
1367 /* The first value is the incoming arg-pointer. */
1368 size = GET_MODE_SIZE (Pmode);
1370 /* The second value is the structure value address unless this is
1371 passed as an "invisible" first argument. */
1372 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1373 size += GET_MODE_SIZE (Pmode);
1375 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1376 if (FUNCTION_ARG_REGNO_P (regno))
1378 mode = targetm.calls.get_raw_arg_mode (regno);
1380 gcc_assert (mode != VOIDmode);
1382 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1383 if (size % align != 0)
1384 size = CEIL (size, align) * align;
1385 size += GET_MODE_SIZE (mode);
1386 apply_args_mode[regno] = mode;
1388 else
1390 apply_args_mode[regno] = VOIDmode;
1393 return size;
1396 /* Return the size required for the block returned by __builtin_apply,
1397 and initialize apply_result_mode. */
1399 static int
1400 apply_result_size (void)
1402 static int size = -1;
1403 int align, regno;
1404 machine_mode mode;
1406 /* The values computed by this function never change. */
1407 if (size < 0)
1409 size = 0;
1411 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1412 if (targetm.calls.function_value_regno_p (regno))
1414 mode = targetm.calls.get_raw_result_mode (regno);
1416 gcc_assert (mode != VOIDmode);
1418 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1419 if (size % align != 0)
1420 size = CEIL (size, align) * align;
1421 size += GET_MODE_SIZE (mode);
1422 apply_result_mode[regno] = mode;
1424 else
1425 apply_result_mode[regno] = VOIDmode;
1427 /* Allow targets that use untyped_call and untyped_return to override
1428 the size so that machine-specific information can be stored here. */
1429 #ifdef APPLY_RESULT_SIZE
1430 size = APPLY_RESULT_SIZE;
1431 #endif
1433 return size;
1436 /* Create a vector describing the result block RESULT. If SAVEP is true,
1437 the result block is used to save the values; otherwise it is used to
1438 restore the values. */
1440 static rtx
1441 result_vector (int savep, rtx result)
1443 int regno, size, align, nelts;
1444 machine_mode mode;
1445 rtx reg, mem;
1446 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1448 size = nelts = 0;
1449 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1450 if ((mode = apply_result_mode[regno]) != VOIDmode)
1452 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1453 if (size % align != 0)
1454 size = CEIL (size, align) * align;
1455 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1456 mem = adjust_address (result, mode, size);
1457 savevec[nelts++] = (savep
1458 ? gen_rtx_SET (mem, reg)
1459 : gen_rtx_SET (reg, mem));
1460 size += GET_MODE_SIZE (mode);
1462 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1465 /* Save the state required to perform an untyped call with the same
1466 arguments as were passed to the current function. */
1468 static rtx
1469 expand_builtin_apply_args_1 (void)
1471 rtx registers, tem;
1472 int size, align, regno;
1473 machine_mode mode;
1474 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1476 /* Create a block where the arg-pointer, structure value address,
1477 and argument registers can be saved. */
1478 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1480 /* Walk past the arg-pointer and structure value address. */
1481 size = GET_MODE_SIZE (Pmode);
1482 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1483 size += GET_MODE_SIZE (Pmode);
1485 /* Save each register used in calling a function to the block. */
1486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1487 if ((mode = apply_args_mode[regno]) != VOIDmode)
1489 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1490 if (size % align != 0)
1491 size = CEIL (size, align) * align;
1493 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1495 emit_move_insn (adjust_address (registers, mode, size), tem);
1496 size += GET_MODE_SIZE (mode);
1499 /* Save the arg pointer to the block. */
1500 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1501 /* We need the pointer as the caller actually passed them to us, not
1502 as we might have pretended they were passed. Make sure it's a valid
1503 operand, as emit_move_insn isn't expected to handle a PLUS. */
1504 if (STACK_GROWS_DOWNWARD)
1506 = force_operand (plus_constant (Pmode, tem,
1507 crtl->args.pretend_args_size),
1508 NULL_RTX);
1509 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1511 size = GET_MODE_SIZE (Pmode);
1513 /* Save the structure value address unless this is passed as an
1514 "invisible" first argument. */
1515 if (struct_incoming_value)
1517 emit_move_insn (adjust_address (registers, Pmode, size),
1518 copy_to_reg (struct_incoming_value));
1519 size += GET_MODE_SIZE (Pmode);
1522 /* Return the address of the block. */
1523 return copy_addr_to_reg (XEXP (registers, 0));
1526 /* __builtin_apply_args returns block of memory allocated on
1527 the stack into which is stored the arg pointer, structure
1528 value address, static chain, and all the registers that might
1529 possibly be used in performing a function call. The code is
1530 moved to the start of the function so the incoming values are
1531 saved. */
1533 static rtx
1534 expand_builtin_apply_args (void)
1536 /* Don't do __builtin_apply_args more than once in a function.
1537 Save the result of the first call and reuse it. */
1538 if (apply_args_value != 0)
1539 return apply_args_value;
1541 /* When this function is called, it means that registers must be
1542 saved on entry to this function. So we migrate the
1543 call to the first insn of this function. */
1544 rtx temp;
1546 start_sequence ();
1547 temp = expand_builtin_apply_args_1 ();
1548 rtx_insn *seq = get_insns ();
1549 end_sequence ();
1551 apply_args_value = temp;
1553 /* Put the insns after the NOTE that starts the function.
1554 If this is inside a start_sequence, make the outer-level insn
1555 chain current, so the code is placed at the start of the
1556 function. If internal_arg_pointer is a non-virtual pseudo,
1557 it needs to be placed after the function that initializes
1558 that pseudo. */
1559 push_topmost_sequence ();
1560 if (REG_P (crtl->args.internal_arg_pointer)
1561 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1562 emit_insn_before (seq, parm_birth_insn);
1563 else
1564 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1565 pop_topmost_sequence ();
1566 return temp;
1570 /* Perform an untyped call and save the state required to perform an
1571 untyped return of whatever value was returned by the given function. */
1573 static rtx
1574 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1576 int size, align, regno;
1577 machine_mode mode;
1578 rtx incoming_args, result, reg, dest, src;
1579 rtx_call_insn *call_insn;
1580 rtx old_stack_level = 0;
1581 rtx call_fusage = 0;
1582 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1584 arguments = convert_memory_address (Pmode, arguments);
1586 /* Create a block where the return registers can be saved. */
1587 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1589 /* Fetch the arg pointer from the ARGUMENTS block. */
1590 incoming_args = gen_reg_rtx (Pmode);
1591 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1592 if (!STACK_GROWS_DOWNWARD)
1593 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1594 incoming_args, 0, OPTAB_LIB_WIDEN);
1596 /* Push a new argument block and copy the arguments. Do not allow
1597 the (potential) memcpy call below to interfere with our stack
1598 manipulations. */
1599 do_pending_stack_adjust ();
1600 NO_DEFER_POP;
1602 /* Save the stack with nonlocal if available. */
1603 if (targetm.have_save_stack_nonlocal ())
1604 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1605 else
1606 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1608 /* Allocate a block of memory onto the stack and copy the memory
1609 arguments to the outgoing arguments address. We can pass TRUE
1610 as the 4th argument because we just saved the stack pointer
1611 and will restore it right after the call. */
1612 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1614 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1615 may have already set current_function_calls_alloca to true.
1616 current_function_calls_alloca won't be set if argsize is zero,
1617 so we have to guarantee need_drap is true here. */
1618 if (SUPPORTS_STACK_ALIGNMENT)
1619 crtl->need_drap = true;
1621 dest = virtual_outgoing_args_rtx;
1622 if (!STACK_GROWS_DOWNWARD)
1624 if (CONST_INT_P (argsize))
1625 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1626 else
1627 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1629 dest = gen_rtx_MEM (BLKmode, dest);
1630 set_mem_align (dest, PARM_BOUNDARY);
1631 src = gen_rtx_MEM (BLKmode, incoming_args);
1632 set_mem_align (src, PARM_BOUNDARY);
1633 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1635 /* Refer to the argument block. */
1636 apply_args_size ();
1637 arguments = gen_rtx_MEM (BLKmode, arguments);
1638 set_mem_align (arguments, PARM_BOUNDARY);
1640 /* Walk past the arg-pointer and structure value address. */
1641 size = GET_MODE_SIZE (Pmode);
1642 if (struct_value)
1643 size += GET_MODE_SIZE (Pmode);
1645 /* Restore each of the registers previously saved. Make USE insns
1646 for each of these registers for use in making the call. */
1647 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1648 if ((mode = apply_args_mode[regno]) != VOIDmode)
1650 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1651 if (size % align != 0)
1652 size = CEIL (size, align) * align;
1653 reg = gen_rtx_REG (mode, regno);
1654 emit_move_insn (reg, adjust_address (arguments, mode, size));
1655 use_reg (&call_fusage, reg);
1656 size += GET_MODE_SIZE (mode);
1659 /* Restore the structure value address unless this is passed as an
1660 "invisible" first argument. */
1661 size = GET_MODE_SIZE (Pmode);
1662 if (struct_value)
1664 rtx value = gen_reg_rtx (Pmode);
1665 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1666 emit_move_insn (struct_value, value);
1667 if (REG_P (struct_value))
1668 use_reg (&call_fusage, struct_value);
1669 size += GET_MODE_SIZE (Pmode);
1672 /* All arguments and registers used for the call are set up by now! */
1673 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1675 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1676 and we don't want to load it into a register as an optimization,
1677 because prepare_call_address already did it if it should be done. */
1678 if (GET_CODE (function) != SYMBOL_REF)
1679 function = memory_address (FUNCTION_MODE, function);
1681 /* Generate the actual call instruction and save the return value. */
1682 if (targetm.have_untyped_call ())
1684 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1685 emit_call_insn (targetm.gen_untyped_call (mem, result,
1686 result_vector (1, result)));
1688 else if (targetm.have_call_value ())
1690 rtx valreg = 0;
1692 /* Locate the unique return register. It is not possible to
1693 express a call that sets more than one return register using
1694 call_value; use untyped_call for that. In fact, untyped_call
1695 only needs to save the return registers in the given block. */
1696 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1697 if ((mode = apply_result_mode[regno]) != VOIDmode)
1699 gcc_assert (!valreg); /* have_untyped_call required. */
1701 valreg = gen_rtx_REG (mode, regno);
1704 emit_insn (targetm.gen_call_value (valreg,
1705 gen_rtx_MEM (FUNCTION_MODE, function),
1706 const0_rtx, NULL_RTX, const0_rtx));
1708 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1710 else
1711 gcc_unreachable ();
1713 /* Find the CALL insn we just emitted, and attach the register usage
1714 information. */
1715 call_insn = last_call_insn ();
1716 add_function_usage_to (call_insn, call_fusage);
1718 /* Restore the stack. */
1719 if (targetm.have_save_stack_nonlocal ())
1720 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1721 else
1722 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1723 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1725 OK_DEFER_POP;
1727 /* Return the address of the result block. */
1728 result = copy_addr_to_reg (XEXP (result, 0));
1729 return convert_memory_address (ptr_mode, result);
1732 /* Perform an untyped return. */
1734 static void
1735 expand_builtin_return (rtx result)
1737 int size, align, regno;
1738 machine_mode mode;
1739 rtx reg;
1740 rtx_insn *call_fusage = 0;
1742 result = convert_memory_address (Pmode, result);
1744 apply_result_size ();
1745 result = gen_rtx_MEM (BLKmode, result);
1747 if (targetm.have_untyped_return ())
1749 rtx vector = result_vector (0, result);
1750 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1751 emit_barrier ();
1752 return;
1755 /* Restore the return value and note that each value is used. */
1756 size = 0;
1757 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1758 if ((mode = apply_result_mode[regno]) != VOIDmode)
1760 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1761 if (size % align != 0)
1762 size = CEIL (size, align) * align;
1763 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1764 emit_move_insn (reg, adjust_address (result, mode, size));
1766 push_to_sequence (call_fusage);
1767 emit_use (reg);
1768 call_fusage = get_insns ();
1769 end_sequence ();
1770 size += GET_MODE_SIZE (mode);
1773 /* Put the USE insns before the return. */
1774 emit_insn (call_fusage);
1776 /* Return whatever values was restored by jumping directly to the end
1777 of the function. */
1778 expand_naked_return ();
1781 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1783 static enum type_class
1784 type_to_class (tree type)
1786 switch (TREE_CODE (type))
1788 case VOID_TYPE: return void_type_class;
1789 case INTEGER_TYPE: return integer_type_class;
1790 case ENUMERAL_TYPE: return enumeral_type_class;
1791 case BOOLEAN_TYPE: return boolean_type_class;
1792 case POINTER_TYPE: return pointer_type_class;
1793 case REFERENCE_TYPE: return reference_type_class;
1794 case OFFSET_TYPE: return offset_type_class;
1795 case REAL_TYPE: return real_type_class;
1796 case COMPLEX_TYPE: return complex_type_class;
1797 case FUNCTION_TYPE: return function_type_class;
1798 case METHOD_TYPE: return method_type_class;
1799 case RECORD_TYPE: return record_type_class;
1800 case UNION_TYPE:
1801 case QUAL_UNION_TYPE: return union_type_class;
1802 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1803 ? string_type_class : array_type_class);
1804 case LANG_TYPE: return lang_type_class;
1805 default: return no_type_class;
1809 /* Expand a call EXP to __builtin_classify_type. */
1811 static rtx
1812 expand_builtin_classify_type (tree exp)
1814 if (call_expr_nargs (exp))
1815 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1816 return GEN_INT (no_type_class);
1819 /* This helper macro, meant to be used in mathfn_built_in below,
1820 determines which among a set of three builtin math functions is
1821 appropriate for a given type mode. The `F' and `L' cases are
1822 automatically generated from the `double' case. */
1823 #define CASE_MATHFN(MATHFN) \
1824 CASE_CFN_##MATHFN: \
1825 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1826 fcodel = BUILT_IN_##MATHFN##L ; break;
1827 /* Similar to above, but appends _R after any F/L suffix. */
1828 #define CASE_MATHFN_REENT(MATHFN) \
1829 case CFN_BUILT_IN_##MATHFN##_R: \
1830 case CFN_BUILT_IN_##MATHFN##F_R: \
1831 case CFN_BUILT_IN_##MATHFN##L_R: \
1832 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1833 fcodel = BUILT_IN_##MATHFN##L_R ; break;
1835 /* Return a function equivalent to FN but operating on floating-point
1836 values of type TYPE, or END_BUILTINS if no such function exists.
1837 This is purely an operation on function codes; it does not guarantee
1838 that the target actually has an implementation of the function. */
1840 static built_in_function
1841 mathfn_built_in_2 (tree type, combined_fn fn)
1843 built_in_function fcode, fcodef, fcodel;
1845 switch (fn)
1847 CASE_MATHFN (ACOS)
1848 CASE_MATHFN (ACOSH)
1849 CASE_MATHFN (ASIN)
1850 CASE_MATHFN (ASINH)
1851 CASE_MATHFN (ATAN)
1852 CASE_MATHFN (ATAN2)
1853 CASE_MATHFN (ATANH)
1854 CASE_MATHFN (CBRT)
1855 CASE_MATHFN (CEIL)
1856 CASE_MATHFN (CEXPI)
1857 CASE_MATHFN (COPYSIGN)
1858 CASE_MATHFN (COS)
1859 CASE_MATHFN (COSH)
1860 CASE_MATHFN (DREM)
1861 CASE_MATHFN (ERF)
1862 CASE_MATHFN (ERFC)
1863 CASE_MATHFN (EXP)
1864 CASE_MATHFN (EXP10)
1865 CASE_MATHFN (EXP2)
1866 CASE_MATHFN (EXPM1)
1867 CASE_MATHFN (FABS)
1868 CASE_MATHFN (FDIM)
1869 CASE_MATHFN (FLOOR)
1870 CASE_MATHFN (FMA)
1871 CASE_MATHFN (FMAX)
1872 CASE_MATHFN (FMIN)
1873 CASE_MATHFN (FMOD)
1874 CASE_MATHFN (FREXP)
1875 CASE_MATHFN (GAMMA)
1876 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1877 CASE_MATHFN (HUGE_VAL)
1878 CASE_MATHFN (HYPOT)
1879 CASE_MATHFN (ILOGB)
1880 CASE_MATHFN (ICEIL)
1881 CASE_MATHFN (IFLOOR)
1882 CASE_MATHFN (INF)
1883 CASE_MATHFN (IRINT)
1884 CASE_MATHFN (IROUND)
1885 CASE_MATHFN (ISINF)
1886 CASE_MATHFN (J0)
1887 CASE_MATHFN (J1)
1888 CASE_MATHFN (JN)
1889 CASE_MATHFN (LCEIL)
1890 CASE_MATHFN (LDEXP)
1891 CASE_MATHFN (LFLOOR)
1892 CASE_MATHFN (LGAMMA)
1893 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1894 CASE_MATHFN (LLCEIL)
1895 CASE_MATHFN (LLFLOOR)
1896 CASE_MATHFN (LLRINT)
1897 CASE_MATHFN (LLROUND)
1898 CASE_MATHFN (LOG)
1899 CASE_MATHFN (LOG10)
1900 CASE_MATHFN (LOG1P)
1901 CASE_MATHFN (LOG2)
1902 CASE_MATHFN (LOGB)
1903 CASE_MATHFN (LRINT)
1904 CASE_MATHFN (LROUND)
1905 CASE_MATHFN (MODF)
1906 CASE_MATHFN (NAN)
1907 CASE_MATHFN (NANS)
1908 CASE_MATHFN (NEARBYINT)
1909 CASE_MATHFN (NEXTAFTER)
1910 CASE_MATHFN (NEXTTOWARD)
1911 CASE_MATHFN (POW)
1912 CASE_MATHFN (POWI)
1913 CASE_MATHFN (POW10)
1914 CASE_MATHFN (REMAINDER)
1915 CASE_MATHFN (REMQUO)
1916 CASE_MATHFN (RINT)
1917 CASE_MATHFN (ROUND)
1918 CASE_MATHFN (SCALB)
1919 CASE_MATHFN (SCALBLN)
1920 CASE_MATHFN (SCALBN)
1921 CASE_MATHFN (SIGNBIT)
1922 CASE_MATHFN (SIGNIFICAND)
1923 CASE_MATHFN (SIN)
1924 CASE_MATHFN (SINCOS)
1925 CASE_MATHFN (SINH)
1926 CASE_MATHFN (SQRT)
1927 CASE_MATHFN (TAN)
1928 CASE_MATHFN (TANH)
1929 CASE_MATHFN (TGAMMA)
1930 CASE_MATHFN (TRUNC)
1931 CASE_MATHFN (Y0)
1932 CASE_MATHFN (Y1)
1933 CASE_MATHFN (YN)
1935 default:
1936 return END_BUILTINS;
1939 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1940 return fcode;
1941 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1942 return fcodef;
1943 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1944 return fcodel;
1945 else
1946 return END_BUILTINS;
1949 /* Return mathematic function equivalent to FN but operating directly on TYPE,
1950 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1951 otherwise use the explicit declaration. If we can't do the conversion,
1952 return null. */
1954 static tree
1955 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1957 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1958 if (fcode2 == END_BUILTINS)
1959 return NULL_TREE;
1961 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1962 return NULL_TREE;
1964 return builtin_decl_explicit (fcode2);
1967 /* Like mathfn_built_in_1, but always use the implicit array. */
1969 tree
1970 mathfn_built_in (tree type, combined_fn fn)
1972 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1975 /* Like mathfn_built_in_1, but take a built_in_function and
1976 always use the implicit array. */
1978 tree
1979 mathfn_built_in (tree type, enum built_in_function fn)
1981 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1984 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1985 return its code, otherwise return IFN_LAST. Note that this function
1986 only tests whether the function is defined in internals.def, not whether
1987 it is actually available on the target. */
1989 internal_fn
1990 associated_internal_fn (tree fndecl)
1992 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1993 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1994 switch (DECL_FUNCTION_CODE (fndecl))
1996 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1997 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1998 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1999 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2000 #include "internal-fn.def"
2002 CASE_FLT_FN (BUILT_IN_POW10):
2003 return IFN_EXP10;
2005 CASE_FLT_FN (BUILT_IN_DREM):
2006 return IFN_REMAINDER;
2008 CASE_FLT_FN (BUILT_IN_SCALBN):
2009 CASE_FLT_FN (BUILT_IN_SCALBLN):
2010 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2011 return IFN_LDEXP;
2012 return IFN_LAST;
2014 default:
2015 return IFN_LAST;
2019 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2020 on the current target by a call to an internal function, return the
2021 code of that internal function, otherwise return IFN_LAST. The caller
2022 is responsible for ensuring that any side-effects of the built-in
2023 call are dealt with correctly. E.g. if CALL sets errno, the caller
2024 must decide that the errno result isn't needed or make it available
2025 in some other way. */
2027 internal_fn
2028 replacement_internal_fn (gcall *call)
2030 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2032 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2033 if (ifn != IFN_LAST)
2035 tree_pair types = direct_internal_fn_types (ifn, call);
2036 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2037 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2038 return ifn;
2041 return IFN_LAST;
2044 /* Expand a call to the builtin trinary math functions (fma).
2045 Return NULL_RTX if a normal call should be emitted rather than expanding the
2046 function in-line. EXP is the expression that is a call to the builtin
2047 function; if convenient, the result should be placed in TARGET.
2048 SUBTARGET may be used as the target for computing one of EXP's
2049 operands. */
2051 static rtx
2052 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2054 optab builtin_optab;
2055 rtx op0, op1, op2, result;
2056 rtx_insn *insns;
2057 tree fndecl = get_callee_fndecl (exp);
2058 tree arg0, arg1, arg2;
2059 machine_mode mode;
2061 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2062 return NULL_RTX;
2064 arg0 = CALL_EXPR_ARG (exp, 0);
2065 arg1 = CALL_EXPR_ARG (exp, 1);
2066 arg2 = CALL_EXPR_ARG (exp, 2);
2068 switch (DECL_FUNCTION_CODE (fndecl))
2070 CASE_FLT_FN (BUILT_IN_FMA):
2071 builtin_optab = fma_optab; break;
2072 default:
2073 gcc_unreachable ();
2076 /* Make a suitable register to place result in. */
2077 mode = TYPE_MODE (TREE_TYPE (exp));
2079 /* Before working hard, check whether the instruction is available. */
2080 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2081 return NULL_RTX;
2083 result = gen_reg_rtx (mode);
2085 /* Always stabilize the argument list. */
2086 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2087 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2088 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2090 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2091 op1 = expand_normal (arg1);
2092 op2 = expand_normal (arg2);
2094 start_sequence ();
2096 /* Compute into RESULT.
2097 Set RESULT to wherever the result comes back. */
2098 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2099 result, 0);
2101 /* If we were unable to expand via the builtin, stop the sequence
2102 (without outputting the insns) and call to the library function
2103 with the stabilized argument list. */
2104 if (result == 0)
2106 end_sequence ();
2107 return expand_call (exp, target, target == const0_rtx);
2110 /* Output the entire sequence. */
2111 insns = get_insns ();
2112 end_sequence ();
2113 emit_insn (insns);
2115 return result;
2118 /* Expand a call to the builtin sin and cos math functions.
2119 Return NULL_RTX if a normal call should be emitted rather than expanding the
2120 function in-line. EXP is the expression that is a call to the builtin
2121 function; if convenient, the result should be placed in TARGET.
2122 SUBTARGET may be used as the target for computing one of EXP's
2123 operands. */
2125 static rtx
2126 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2128 optab builtin_optab;
2129 rtx op0;
2130 rtx_insn *insns;
2131 tree fndecl = get_callee_fndecl (exp);
2132 machine_mode mode;
2133 tree arg;
2135 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2136 return NULL_RTX;
2138 arg = CALL_EXPR_ARG (exp, 0);
2140 switch (DECL_FUNCTION_CODE (fndecl))
2142 CASE_FLT_FN (BUILT_IN_SIN):
2143 CASE_FLT_FN (BUILT_IN_COS):
2144 builtin_optab = sincos_optab; break;
2145 default:
2146 gcc_unreachable ();
2149 /* Make a suitable register to place result in. */
2150 mode = TYPE_MODE (TREE_TYPE (exp));
2152 /* Check if sincos insn is available, otherwise fallback
2153 to sin or cos insn. */
2154 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2155 switch (DECL_FUNCTION_CODE (fndecl))
2157 CASE_FLT_FN (BUILT_IN_SIN):
2158 builtin_optab = sin_optab; break;
2159 CASE_FLT_FN (BUILT_IN_COS):
2160 builtin_optab = cos_optab; break;
2161 default:
2162 gcc_unreachable ();
2165 /* Before working hard, check whether the instruction is available. */
2166 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2168 rtx result = gen_reg_rtx (mode);
2170 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2171 need to expand the argument again. This way, we will not perform
2172 side-effects more the once. */
2173 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2175 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2177 start_sequence ();
2179 /* Compute into RESULT.
2180 Set RESULT to wherever the result comes back. */
2181 if (builtin_optab == sincos_optab)
2183 int ok;
2185 switch (DECL_FUNCTION_CODE (fndecl))
2187 CASE_FLT_FN (BUILT_IN_SIN):
2188 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2189 break;
2190 CASE_FLT_FN (BUILT_IN_COS):
2191 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2192 break;
2193 default:
2194 gcc_unreachable ();
2196 gcc_assert (ok);
2198 else
2199 result = expand_unop (mode, builtin_optab, op0, result, 0);
2201 if (result != 0)
2203 /* Output the entire sequence. */
2204 insns = get_insns ();
2205 end_sequence ();
2206 emit_insn (insns);
2207 return result;
2210 /* If we were unable to expand via the builtin, stop the sequence
2211 (without outputting the insns) and call to the library function
2212 with the stabilized argument list. */
2213 end_sequence ();
2216 return expand_call (exp, target, target == const0_rtx);
2219 /* Given an interclass math builtin decl FNDECL and it's argument ARG
2220 return an RTL instruction code that implements the functionality.
2221 If that isn't possible or available return CODE_FOR_nothing. */
2223 static enum insn_code
2224 interclass_mathfn_icode (tree arg, tree fndecl)
2226 bool errno_set = false;
2227 optab builtin_optab = unknown_optab;
2228 machine_mode mode;
2230 switch (DECL_FUNCTION_CODE (fndecl))
2232 CASE_FLT_FN (BUILT_IN_ILOGB):
2233 errno_set = true; builtin_optab = ilogb_optab; break;
2234 CASE_FLT_FN (BUILT_IN_ISINF):
2235 builtin_optab = isinf_optab; break;
2236 case BUILT_IN_ISNORMAL:
2237 case BUILT_IN_ISFINITE:
2238 CASE_FLT_FN (BUILT_IN_FINITE):
2239 case BUILT_IN_FINITED32:
2240 case BUILT_IN_FINITED64:
2241 case BUILT_IN_FINITED128:
2242 case BUILT_IN_ISINFD32:
2243 case BUILT_IN_ISINFD64:
2244 case BUILT_IN_ISINFD128:
2245 /* These builtins have no optabs (yet). */
2246 break;
2247 default:
2248 gcc_unreachable ();
2251 /* There's no easy way to detect the case we need to set EDOM. */
2252 if (flag_errno_math && errno_set)
2253 return CODE_FOR_nothing;
2255 /* Optab mode depends on the mode of the input argument. */
2256 mode = TYPE_MODE (TREE_TYPE (arg));
2258 if (builtin_optab)
2259 return optab_handler (builtin_optab, mode);
2260 return CODE_FOR_nothing;
2263 /* Expand a call to one of the builtin math functions that operate on
2264 floating point argument and output an integer result (ilogb, isinf,
2265 isnan, etc).
2266 Return 0 if a normal call should be emitted rather than expanding the
2267 function in-line. EXP is the expression that is a call to the builtin
2268 function; if convenient, the result should be placed in TARGET. */
2270 static rtx
2271 expand_builtin_interclass_mathfn (tree exp, rtx target)
2273 enum insn_code icode = CODE_FOR_nothing;
2274 rtx op0;
2275 tree fndecl = get_callee_fndecl (exp);
2276 machine_mode mode;
2277 tree arg;
2279 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2280 return NULL_RTX;
2282 arg = CALL_EXPR_ARG (exp, 0);
2283 icode = interclass_mathfn_icode (arg, fndecl);
2284 mode = TYPE_MODE (TREE_TYPE (arg));
2286 if (icode != CODE_FOR_nothing)
2288 struct expand_operand ops[1];
2289 rtx_insn *last = get_last_insn ();
2290 tree orig_arg = arg;
2292 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2293 need to expand the argument again. This way, we will not perform
2294 side-effects more the once. */
2295 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2297 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2299 if (mode != GET_MODE (op0))
2300 op0 = convert_to_mode (mode, op0, 0);
2302 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2303 if (maybe_legitimize_operands (icode, 0, 1, ops)
2304 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2305 return ops[0].value;
2307 delete_insns_since (last);
2308 CALL_EXPR_ARG (exp, 0) = orig_arg;
2311 return NULL_RTX;
2314 /* Expand a call to the builtin sincos math function.
2315 Return NULL_RTX if a normal call should be emitted rather than expanding the
2316 function in-line. EXP is the expression that is a call to the builtin
2317 function. */
2319 static rtx
2320 expand_builtin_sincos (tree exp)
2322 rtx op0, op1, op2, target1, target2;
2323 machine_mode mode;
2324 tree arg, sinp, cosp;
2325 int result;
2326 location_t loc = EXPR_LOCATION (exp);
2327 tree alias_type, alias_off;
2329 if (!validate_arglist (exp, REAL_TYPE,
2330 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2331 return NULL_RTX;
2333 arg = CALL_EXPR_ARG (exp, 0);
2334 sinp = CALL_EXPR_ARG (exp, 1);
2335 cosp = CALL_EXPR_ARG (exp, 2);
2337 /* Make a suitable register to place result in. */
2338 mode = TYPE_MODE (TREE_TYPE (arg));
2340 /* Check if sincos insn is available, otherwise emit the call. */
2341 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2342 return NULL_RTX;
2344 target1 = gen_reg_rtx (mode);
2345 target2 = gen_reg_rtx (mode);
2347 op0 = expand_normal (arg);
2348 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2349 alias_off = build_int_cst (alias_type, 0);
2350 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2351 sinp, alias_off));
2352 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2353 cosp, alias_off));
2355 /* Compute into target1 and target2.
2356 Set TARGET to wherever the result comes back. */
2357 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2358 gcc_assert (result);
2360 /* Move target1 and target2 to the memory locations indicated
2361 by op1 and op2. */
2362 emit_move_insn (op1, target1);
2363 emit_move_insn (op2, target2);
2365 return const0_rtx;
2368 /* Expand a call to the internal cexpi builtin to the sincos math function.
2369 EXP is the expression that is a call to the builtin function; if convenient,
2370 the result should be placed in TARGET. */
2372 static rtx
2373 expand_builtin_cexpi (tree exp, rtx target)
2375 tree fndecl = get_callee_fndecl (exp);
2376 tree arg, type;
2377 machine_mode mode;
2378 rtx op0, op1, op2;
2379 location_t loc = EXPR_LOCATION (exp);
2381 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2382 return NULL_RTX;
2384 arg = CALL_EXPR_ARG (exp, 0);
2385 type = TREE_TYPE (arg);
2386 mode = TYPE_MODE (TREE_TYPE (arg));
2388 /* Try expanding via a sincos optab, fall back to emitting a libcall
2389 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2390 is only generated from sincos, cexp or if we have either of them. */
2391 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2393 op1 = gen_reg_rtx (mode);
2394 op2 = gen_reg_rtx (mode);
2396 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2398 /* Compute into op1 and op2. */
2399 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2401 else if (targetm.libc_has_function (function_sincos))
2403 tree call, fn = NULL_TREE;
2404 tree top1, top2;
2405 rtx op1a, op2a;
2407 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2408 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2409 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2410 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2411 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2412 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2413 else
2414 gcc_unreachable ();
2416 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2417 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2418 op1a = copy_addr_to_reg (XEXP (op1, 0));
2419 op2a = copy_addr_to_reg (XEXP (op2, 0));
2420 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2421 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2423 /* Make sure not to fold the sincos call again. */
2424 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2425 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2426 call, 3, arg, top1, top2));
2428 else
2430 tree call, fn = NULL_TREE, narg;
2431 tree ctype = build_complex_type (type);
2433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2434 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2435 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2436 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2437 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2438 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2439 else
2440 gcc_unreachable ();
2442 /* If we don't have a decl for cexp create one. This is the
2443 friendliest fallback if the user calls __builtin_cexpi
2444 without full target C99 function support. */
2445 if (fn == NULL_TREE)
2447 tree fntype;
2448 const char *name = NULL;
2450 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2451 name = "cexpf";
2452 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2453 name = "cexp";
2454 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2455 name = "cexpl";
2457 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2458 fn = build_fn_decl (name, fntype);
2461 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2462 build_real (type, dconst0), arg);
2464 /* Make sure not to fold the cexp call again. */
2465 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2466 return expand_expr (build_call_nary (ctype, call, 1, narg),
2467 target, VOIDmode, EXPAND_NORMAL);
2470 /* Now build the proper return type. */
2471 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2472 make_tree (TREE_TYPE (arg), op2),
2473 make_tree (TREE_TYPE (arg), op1)),
2474 target, VOIDmode, EXPAND_NORMAL);
2477 /* Conveniently construct a function call expression. FNDECL names the
2478 function to be called, N is the number of arguments, and the "..."
2479 parameters are the argument expressions. Unlike build_call_exr
2480 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2482 static tree
2483 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2485 va_list ap;
2486 tree fntype = TREE_TYPE (fndecl);
2487 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2489 va_start (ap, n);
2490 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2491 va_end (ap);
2492 SET_EXPR_LOCATION (fn, loc);
2493 return fn;
2496 /* Expand a call to one of the builtin rounding functions gcc defines
2497 as an extension (lfloor and lceil). As these are gcc extensions we
2498 do not need to worry about setting errno to EDOM.
2499 If expanding via optab fails, lower expression to (int)(floor(x)).
2500 EXP is the expression that is a call to the builtin function;
2501 if convenient, the result should be placed in TARGET. */
2503 static rtx
2504 expand_builtin_int_roundingfn (tree exp, rtx target)
2506 convert_optab builtin_optab;
2507 rtx op0, tmp;
2508 rtx_insn *insns;
2509 tree fndecl = get_callee_fndecl (exp);
2510 enum built_in_function fallback_fn;
2511 tree fallback_fndecl;
2512 machine_mode mode;
2513 tree arg;
2515 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2516 gcc_unreachable ();
2518 arg = CALL_EXPR_ARG (exp, 0);
2520 switch (DECL_FUNCTION_CODE (fndecl))
2522 CASE_FLT_FN (BUILT_IN_ICEIL):
2523 CASE_FLT_FN (BUILT_IN_LCEIL):
2524 CASE_FLT_FN (BUILT_IN_LLCEIL):
2525 builtin_optab = lceil_optab;
2526 fallback_fn = BUILT_IN_CEIL;
2527 break;
2529 CASE_FLT_FN (BUILT_IN_IFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LFLOOR):
2531 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2532 builtin_optab = lfloor_optab;
2533 fallback_fn = BUILT_IN_FLOOR;
2534 break;
2536 default:
2537 gcc_unreachable ();
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (exp));
2543 target = gen_reg_rtx (mode);
2545 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2546 need to expand the argument again. This way, we will not perform
2547 side-effects more the once. */
2548 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2550 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2552 start_sequence ();
2554 /* Compute into TARGET. */
2555 if (expand_sfix_optab (target, op0, builtin_optab))
2557 /* Output the entire sequence. */
2558 insns = get_insns ();
2559 end_sequence ();
2560 emit_insn (insns);
2561 return target;
2564 /* If we were unable to expand via the builtin, stop the sequence
2565 (without outputting the insns). */
2566 end_sequence ();
2568 /* Fall back to floating point rounding optab. */
2569 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2571 /* For non-C99 targets we may end up without a fallback fndecl here
2572 if the user called __builtin_lfloor directly. In this case emit
2573 a call to the floor/ceil variants nevertheless. This should result
2574 in the best user experience for not full C99 targets. */
2575 if (fallback_fndecl == NULL_TREE)
2577 tree fntype;
2578 const char *name = NULL;
2580 switch (DECL_FUNCTION_CODE (fndecl))
2582 case BUILT_IN_ICEIL:
2583 case BUILT_IN_LCEIL:
2584 case BUILT_IN_LLCEIL:
2585 name = "ceil";
2586 break;
2587 case BUILT_IN_ICEILF:
2588 case BUILT_IN_LCEILF:
2589 case BUILT_IN_LLCEILF:
2590 name = "ceilf";
2591 break;
2592 case BUILT_IN_ICEILL:
2593 case BUILT_IN_LCEILL:
2594 case BUILT_IN_LLCEILL:
2595 name = "ceill";
2596 break;
2597 case BUILT_IN_IFLOOR:
2598 case BUILT_IN_LFLOOR:
2599 case BUILT_IN_LLFLOOR:
2600 name = "floor";
2601 break;
2602 case BUILT_IN_IFLOORF:
2603 case BUILT_IN_LFLOORF:
2604 case BUILT_IN_LLFLOORF:
2605 name = "floorf";
2606 break;
2607 case BUILT_IN_IFLOORL:
2608 case BUILT_IN_LFLOORL:
2609 case BUILT_IN_LLFLOORL:
2610 name = "floorl";
2611 break;
2612 default:
2613 gcc_unreachable ();
2616 fntype = build_function_type_list (TREE_TYPE (arg),
2617 TREE_TYPE (arg), NULL_TREE);
2618 fallback_fndecl = build_fn_decl (name, fntype);
2621 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2623 tmp = expand_normal (exp);
2624 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2626 /* Truncate the result of floating point optab to integer
2627 via expand_fix (). */
2628 target = gen_reg_rtx (mode);
2629 expand_fix (target, tmp, 0);
2631 return target;
2634 /* Expand a call to one of the builtin math functions doing integer
2635 conversion (lrint).
2636 Return 0 if a normal call should be emitted rather than expanding the
2637 function in-line. EXP is the expression that is a call to the builtin
2638 function; if convenient, the result should be placed in TARGET. */
2640 static rtx
2641 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2643 convert_optab builtin_optab;
2644 rtx op0;
2645 rtx_insn *insns;
2646 tree fndecl = get_callee_fndecl (exp);
2647 tree arg;
2648 machine_mode mode;
2649 enum built_in_function fallback_fn = BUILT_IN_NONE;
2651 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2652 gcc_unreachable ();
2654 arg = CALL_EXPR_ARG (exp, 0);
2656 switch (DECL_FUNCTION_CODE (fndecl))
2658 CASE_FLT_FN (BUILT_IN_IRINT):
2659 fallback_fn = BUILT_IN_LRINT;
2660 gcc_fallthrough ();
2661 CASE_FLT_FN (BUILT_IN_LRINT):
2662 CASE_FLT_FN (BUILT_IN_LLRINT):
2663 builtin_optab = lrint_optab;
2664 break;
2666 CASE_FLT_FN (BUILT_IN_IROUND):
2667 fallback_fn = BUILT_IN_LROUND;
2668 gcc_fallthrough ();
2669 CASE_FLT_FN (BUILT_IN_LROUND):
2670 CASE_FLT_FN (BUILT_IN_LLROUND):
2671 builtin_optab = lround_optab;
2672 break;
2674 default:
2675 gcc_unreachable ();
2678 /* There's no easy way to detect the case we need to set EDOM. */
2679 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2680 return NULL_RTX;
2682 /* Make a suitable register to place result in. */
2683 mode = TYPE_MODE (TREE_TYPE (exp));
2685 /* There's no easy way to detect the case we need to set EDOM. */
2686 if (!flag_errno_math)
2688 rtx result = gen_reg_rtx (mode);
2690 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2691 need to expand the argument again. This way, we will not perform
2692 side-effects more the once. */
2693 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2695 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2697 start_sequence ();
2699 if (expand_sfix_optab (result, op0, builtin_optab))
2701 /* Output the entire sequence. */
2702 insns = get_insns ();
2703 end_sequence ();
2704 emit_insn (insns);
2705 return result;
2708 /* If we were unable to expand via the builtin, stop the sequence
2709 (without outputting the insns) and call to the library function
2710 with the stabilized argument list. */
2711 end_sequence ();
2714 if (fallback_fn != BUILT_IN_NONE)
2716 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2717 targets, (int) round (x) should never be transformed into
2718 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2719 a call to lround in the hope that the target provides at least some
2720 C99 functions. This should result in the best user experience for
2721 not full C99 targets. */
2722 tree fallback_fndecl = mathfn_built_in_1
2723 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2725 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2726 fallback_fndecl, 1, arg);
2728 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2729 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2730 return convert_to_mode (mode, target, 0);
2733 return expand_call (exp, target, target == const0_rtx);
2736 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2737 a normal call should be emitted rather than expanding the function
2738 in-line. EXP is the expression that is a call to the builtin
2739 function; if convenient, the result should be placed in TARGET. */
2741 static rtx
2742 expand_builtin_powi (tree exp, rtx target)
2744 tree arg0, arg1;
2745 rtx op0, op1;
2746 machine_mode mode;
2747 machine_mode mode2;
2749 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2750 return NULL_RTX;
2752 arg0 = CALL_EXPR_ARG (exp, 0);
2753 arg1 = CALL_EXPR_ARG (exp, 1);
2754 mode = TYPE_MODE (TREE_TYPE (exp));
2756 /* Emit a libcall to libgcc. */
2758 /* Mode of the 2nd argument must match that of an int. */
2759 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2761 if (target == NULL_RTX)
2762 target = gen_reg_rtx (mode);
2764 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2765 if (GET_MODE (op0) != mode)
2766 op0 = convert_to_mode (mode, op0, 0);
2767 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2768 if (GET_MODE (op1) != mode2)
2769 op1 = convert_to_mode (mode2, op1, 0);
2771 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2772 target, LCT_CONST, mode,
2773 op0, mode, op1, mode2);
2775 return target;
2778 /* Expand expression EXP which is a call to the strlen builtin. Return
2779 NULL_RTX if we failed the caller should emit a normal call, otherwise
2780 try to get the result in TARGET, if convenient. */
2782 static rtx
2783 expand_builtin_strlen (tree exp, rtx target,
2784 machine_mode target_mode)
2786 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2787 return NULL_RTX;
2788 else
2790 struct expand_operand ops[4];
2791 rtx pat;
2792 tree len;
2793 tree src = CALL_EXPR_ARG (exp, 0);
2794 rtx src_reg;
2795 rtx_insn *before_strlen;
2796 machine_mode insn_mode;
2797 enum insn_code icode = CODE_FOR_nothing;
2798 unsigned int align;
2800 /* If the length can be computed at compile-time, return it. */
2801 len = c_strlen (src, 0);
2802 if (len)
2803 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2805 /* If the length can be computed at compile-time and is constant
2806 integer, but there are side-effects in src, evaluate
2807 src for side-effects, then return len.
2808 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2809 can be optimized into: i++; x = 3; */
2810 len = c_strlen (src, 1);
2811 if (len && TREE_CODE (len) == INTEGER_CST)
2813 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2814 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2817 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2819 /* If SRC is not a pointer type, don't do this operation inline. */
2820 if (align == 0)
2821 return NULL_RTX;
2823 /* Bail out if we can't compute strlen in the right mode. */
2824 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2826 icode = optab_handler (strlen_optab, insn_mode);
2827 if (icode != CODE_FOR_nothing)
2828 break;
2830 if (insn_mode == VOIDmode)
2831 return NULL_RTX;
2833 /* Make a place to hold the source address. We will not expand
2834 the actual source until we are sure that the expansion will
2835 not fail -- there are trees that cannot be expanded twice. */
2836 src_reg = gen_reg_rtx (Pmode);
2838 /* Mark the beginning of the strlen sequence so we can emit the
2839 source operand later. */
2840 before_strlen = get_last_insn ();
2842 create_output_operand (&ops[0], target, insn_mode);
2843 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2844 create_integer_operand (&ops[2], 0);
2845 create_integer_operand (&ops[3], align);
2846 if (!maybe_expand_insn (icode, 4, ops))
2847 return NULL_RTX;
2849 /* Now that we are assured of success, expand the source. */
2850 start_sequence ();
2851 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2852 if (pat != src_reg)
2854 #ifdef POINTERS_EXTEND_UNSIGNED
2855 if (GET_MODE (pat) != Pmode)
2856 pat = convert_to_mode (Pmode, pat,
2857 POINTERS_EXTEND_UNSIGNED);
2858 #endif
2859 emit_move_insn (src_reg, pat);
2861 pat = get_insns ();
2862 end_sequence ();
2864 if (before_strlen)
2865 emit_insn_after (pat, before_strlen);
2866 else
2867 emit_insn_before (pat, get_insns ());
2869 /* Return the value in the proper mode for this function. */
2870 if (GET_MODE (ops[0].value) == target_mode)
2871 target = ops[0].value;
2872 else if (target != 0)
2873 convert_move (target, ops[0].value, 0);
2874 else
2875 target = convert_to_mode (target_mode, ops[0].value, 0);
2877 return target;
2881 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2882 bytes from constant string DATA + OFFSET and return it as target
2883 constant. */
2885 static rtx
2886 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2887 scalar_int_mode mode)
2889 const char *str = (const char *) data;
2891 gcc_assert (offset >= 0
2892 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2893 <= strlen (str) + 1));
2895 return c_readstr (str + offset, mode);
2898 /* LEN specify length of the block of memcpy/memset operation.
2899 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2900 In some cases we can make very likely guess on max size, then we
2901 set it into PROBABLE_MAX_SIZE. */
2903 static void
2904 determine_block_size (tree len, rtx len_rtx,
2905 unsigned HOST_WIDE_INT *min_size,
2906 unsigned HOST_WIDE_INT *max_size,
2907 unsigned HOST_WIDE_INT *probable_max_size)
2909 if (CONST_INT_P (len_rtx))
2911 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2912 return;
2914 else
2916 wide_int min, max;
2917 enum value_range_type range_type = VR_UNDEFINED;
2919 /* Determine bounds from the type. */
2920 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2921 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2922 else
2923 *min_size = 0;
2924 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2925 *probable_max_size = *max_size
2926 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2927 else
2928 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2930 if (TREE_CODE (len) == SSA_NAME)
2931 range_type = get_range_info (len, &min, &max);
2932 if (range_type == VR_RANGE)
2934 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2935 *min_size = min.to_uhwi ();
2936 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2937 *probable_max_size = *max_size = max.to_uhwi ();
2939 else if (range_type == VR_ANTI_RANGE)
2941 /* Anti range 0...N lets us to determine minimal size to N+1. */
2942 if (min == 0)
2944 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2945 *min_size = max.to_uhwi () + 1;
2947 /* Code like
2949 int n;
2950 if (n < 100)
2951 memcpy (a, b, n)
2953 Produce anti range allowing negative values of N. We still
2954 can use the information and make a guess that N is not negative.
2956 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2957 *probable_max_size = min.to_uhwi () - 1;
2960 gcc_checking_assert (*max_size <=
2961 (unsigned HOST_WIDE_INT)
2962 GET_MODE_MASK (GET_MODE (len_rtx)));
2965 /* Try to verify that the sizes and lengths of the arguments to a string
2966 manipulation function given by EXP are within valid bounds and that
2967 the operation does not lead to buffer overflow. Arguments other than
2968 EXP may be null. When non-null, the arguments have the following
2969 meaning:
2970 SIZE is the user-supplied size argument to the function (such as in
2971 memcpy(d, s, SIZE) or strncpy(d, s, SIZE). It specifies the exact
2972 number of bytes to write.
2973 MAXLEN is the user-supplied bound on the length of the source sequence
2974 (such as in strncat(d, s, N). It specifies the upper limit on the number
2975 of bytes to write.
2976 SRC is the source string (such as in strcpy(d, s)) when the expression
2977 EXP is a string function call (as opposed to a memory call like memcpy).
2978 As an exception, SRC can also be an integer denoting the precomputed
2979 size of the source string or object (for functions like memcpy).
2980 OBJSIZE is the size of the destination object specified by the last
2981 argument to the _chk builtins, typically resulting from the expansion
2982 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
2983 OBJSIZE).
2985 When SIZE is null LEN is checked to verify that it doesn't exceed
2986 SIZE_MAX.
2988 If the call is successfully verified as safe from buffer overflow
2989 the function returns true, otherwise false.. */
2991 static bool
2992 check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
2994 /* The size of the largest object is half the address space, or
2995 SSIZE_MAX. (This is way too permissive.) */
2996 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
2998 tree slen = NULL_TREE;
3000 tree range[2] = { NULL_TREE, NULL_TREE };
3002 /* Set to true when the exact number of bytes written by a string
3003 function like strcpy is not known and the only thing that is
3004 known is that it must be at least one (for the terminating nul). */
3005 bool at_least_one = false;
3006 if (src)
3008 /* SRC is normally a pointer to string but as a special case
3009 it can be an integer denoting the length of a string. */
3010 if (POINTER_TYPE_P (TREE_TYPE (src)))
3012 /* Try to determine the range of lengths the source string
3013 refers to. If it can be determined and is less than
3014 the upper bound given by MAXLEN add one to it for
3015 the terminating nul. Otherwise, set it to one for
3016 the same reason, or to MAXLEN as appropriate. */
3017 get_range_strlen (src, range);
3018 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3020 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3021 range[0] = range[1] = maxlen;
3022 else
3023 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3024 range[0], size_one_node);
3026 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3027 range[1] = maxlen;
3028 else if (!integer_all_onesp (range[1]))
3029 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3030 range[1], size_one_node);
3032 slen = range[0];
3034 else
3036 at_least_one = true;
3037 slen = size_one_node;
3040 else
3041 slen = src;
3044 if (!size && !maxlen)
3046 /* When the only available piece of data is the object size
3047 there is nothing to do. */
3048 if (!slen)
3049 return true;
3051 /* Otherwise, when the length of the source sequence is known
3052 (as with with strlen), set SIZE to it. */
3053 if (!range[0])
3054 size = slen;
3057 if (!objsize)
3058 objsize = maxobjsize;
3060 /* The SIZE is exact if it's non-null, constant, and in range of
3061 unsigned HOST_WIDE_INT. */
3062 bool exactsize = size && tree_fits_uhwi_p (size);
3064 if (size)
3065 get_size_range (size, range);
3067 /* First check the number of bytes to be written against the maximum
3068 object size. */
3069 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3071 location_t loc = tree_nonartificial_location (exp);
3072 loc = expansion_point_location_if_in_system_header (loc);
3074 if (range[0] == range[1])
3075 warning_at (loc, opt,
3076 "%K%qD specified size %E "
3077 "exceeds maximum object size %E",
3078 exp, get_callee_fndecl (exp), range[0], maxobjsize);
3079 else
3080 warning_at (loc, opt,
3081 "%K%qD specified size between %E and %E "
3082 "exceeds maximum object size %E",
3083 exp, get_callee_fndecl (exp),
3084 range[0], range[1], maxobjsize);
3085 return false;
3088 /* Next check the number of bytes to be written against the destination
3089 object size. */
3090 if (range[0] || !exactsize || integer_all_onesp (size))
3092 if (range[0]
3093 && ((tree_fits_uhwi_p (objsize)
3094 && tree_int_cst_lt (objsize, range[0]))
3095 || (tree_fits_uhwi_p (size)
3096 && tree_int_cst_lt (size, range[0]))))
3098 location_t loc = tree_nonartificial_location (exp);
3099 loc = expansion_point_location_if_in_system_header (loc);
3101 if (size == slen && at_least_one)
3103 /* This is a call to strcpy with a destination of 0 size
3104 and a source of unknown length. The call will write
3105 at least one byte past the end of the destination. */
3106 warning_at (loc, opt,
3107 "%K%qD writing %E or more bytes into a region "
3108 "of size %E overflows the destination",
3109 exp, get_callee_fndecl (exp), range[0], objsize);
3111 else if (tree_int_cst_equal (range[0], range[1]))
3112 warning_at (loc, opt,
3113 (integer_onep (range[0])
3114 ? G_("%K%qD writing %E byte into a region "
3115 "of size %E overflows the destination")
3116 : G_("%K%qD writing %E bytes into a region "
3117 "of size %E overflows the destination")),
3118 exp, get_callee_fndecl (exp), range[0], objsize);
3119 else if (tree_int_cst_sign_bit (range[1]))
3121 /* Avoid printing the upper bound if it's invalid. */
3122 warning_at (loc, opt,
3123 "%K%qD writing %E or more bytes into a region "
3124 "of size %E overflows the destination",
3125 exp, get_callee_fndecl (exp), range[0], objsize);
3127 else
3128 warning_at (loc, opt,
3129 "%K%qD writing between %E and %E bytes into "
3130 "a region of size %E overflows the destination",
3131 exp, get_callee_fndecl (exp), range[0], range[1],
3132 objsize);
3134 /* Return error when an overflow has been detected. */
3135 return false;
3139 /* Check the maximum length of the source sequence against the size
3140 of the destination object if known, or against the maximum size
3141 of an object. */
3142 if (maxlen)
3144 get_size_range (maxlen, range);
3146 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3148 location_t loc = tree_nonartificial_location (exp);
3149 loc = expansion_point_location_if_in_system_header (loc);
3151 if (tree_int_cst_lt (maxobjsize, range[0]))
3153 /* Warn about crazy big sizes first since that's more
3154 likely to be meaningful than saying that the bound
3155 is greater than the object size if both are big. */
3156 if (range[0] == range[1])
3157 warning_at (loc, opt,
3158 "%K%qD specified bound %E "
3159 "exceeds maximum object size %E",
3160 exp, get_callee_fndecl (exp),
3161 range[0], maxobjsize);
3162 else
3163 warning_at (loc, opt,
3164 "%K%qD specified bound between %E and %E "
3165 "exceeds maximum object size %E",
3166 exp, get_callee_fndecl (exp),
3167 range[0], range[1], maxobjsize);
3169 return false;
3172 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3174 if (tree_int_cst_equal (range[0], range[1]))
3175 warning_at (loc, opt,
3176 "%K%qD specified bound %E "
3177 "exceeds destination size %E",
3178 exp, get_callee_fndecl (exp),
3179 range[0], objsize);
3180 else
3181 warning_at (loc, opt,
3182 "%K%qD specified bound between %E and %E "
3183 "exceeds destination size %E",
3184 exp, get_callee_fndecl (exp),
3185 range[0], range[1], objsize);
3186 return false;
3191 if (slen
3192 && slen == src
3193 && size && range[0]
3194 && tree_int_cst_lt (slen, range[0]))
3196 location_t loc = tree_nonartificial_location (exp);
3198 if (tree_int_cst_equal (range[0], range[1]))
3199 warning_at (loc, opt,
3200 (tree_int_cst_equal (range[0], integer_one_node)
3201 ? G_("%K%qD reading %E byte from a region of size %E")
3202 : G_("%K%qD reading %E bytes from a region of size %E")),
3203 exp, get_callee_fndecl (exp), range[0], slen);
3204 else if (tree_int_cst_sign_bit (range[1]))
3206 /* Avoid printing the upper bound if it's invalid. */
3207 warning_at (loc, opt,
3208 "%K%qD reading %E or more bytes from a region "
3209 "of size %E",
3210 exp, get_callee_fndecl (exp), range[0], slen);
3212 else
3213 warning_at (loc, opt,
3214 "%K%qD reading between %E and %E bytes from a region "
3215 "of size %E",
3216 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3217 return false;
3220 return true;
3223 /* Helper to compute the size of the object referenced by the DEST
3224 expression which must of of pointer type, using Object Size type
3225 OSTYPE (only the least significant 2 bits are used). Return
3226 the size of the object if successful or NULL when the size cannot
3227 be determined. */
3229 static inline tree
3230 compute_objsize (tree dest, int ostype)
3232 unsigned HOST_WIDE_INT size;
3233 if (compute_builtin_object_size (dest, ostype & 3, &size))
3234 return build_int_cst (sizetype, size);
3236 return NULL_TREE;
3239 /* Helper to determine and check the sizes of the source and the destination
3240 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3241 call expression, DEST is the destination argument, SRC is the source
3242 argument or null, and LEN is the number of bytes. Use Object Size type-0
3243 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3244 (no overflow or invalid sizes), false otherwise. */
3246 static bool
3247 check_memop_sizes (tree exp, tree dest, tree src, tree size)
3249 if (!warn_stringop_overflow)
3250 return true;
3252 /* For functions like memset and memcpy that operate on raw memory
3253 try to determine the size of the largest source and destination
3254 object using type-0 Object Size regardless of the object size
3255 type specified by the option. */
3256 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3257 tree dstsize = compute_objsize (dest, 0);
3259 return check_sizes (OPT_Wstringop_overflow_, exp,
3260 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3263 /* Validate memchr arguments without performing any expansion.
3264 Return NULL_RTX. */
3266 static rtx
3267 expand_builtin_memchr (tree exp, rtx)
3269 if (!validate_arglist (exp,
3270 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3271 return NULL_RTX;
3273 tree arg1 = CALL_EXPR_ARG (exp, 0);
3274 tree len = CALL_EXPR_ARG (exp, 2);
3276 /* Diagnose calls where the specified length exceeds the size
3277 of the object. */
3278 if (warn_stringop_overflow)
3280 tree size = compute_objsize (arg1, 0);
3281 check_sizes (OPT_Wstringop_overflow_,
3282 exp, len, /*maxlen=*/NULL_TREE,
3283 size, /*objsize=*/NULL_TREE);
3286 return NULL_RTX;
3289 /* Expand a call EXP to the memcpy builtin.
3290 Return NULL_RTX if we failed, the caller should emit a normal call,
3291 otherwise try to get the result in TARGET, if convenient (and in
3292 mode MODE if that's convenient). */
3294 static rtx
3295 expand_builtin_memcpy (tree exp, rtx target)
3297 if (!validate_arglist (exp,
3298 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3299 return NULL_RTX;
3301 tree dest = CALL_EXPR_ARG (exp, 0);
3302 tree src = CALL_EXPR_ARG (exp, 1);
3303 tree len = CALL_EXPR_ARG (exp, 2);
3305 check_memop_sizes (exp, dest, src, len);
3307 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3308 /*endp=*/ 0);
3311 /* Check a call EXP to the memmove built-in for validity.
3312 Return NULL_RTX on both success and failure. */
3314 static rtx
3315 expand_builtin_memmove (tree exp, rtx)
3317 if (!validate_arglist (exp,
3318 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3321 tree dest = CALL_EXPR_ARG (exp, 0);
3322 tree src = CALL_EXPR_ARG (exp, 1);
3323 tree len = CALL_EXPR_ARG (exp, 2);
3325 check_memop_sizes (exp, dest, src, len);
3327 return NULL_RTX;
3330 /* Expand an instrumented call EXP to the memcpy builtin.
3331 Return NULL_RTX if we failed, the caller should emit a normal call,
3332 otherwise try to get the result in TARGET, if convenient (and in
3333 mode MODE if that's convenient). */
3335 static rtx
3336 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3338 if (!validate_arglist (exp,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 INTEGER_TYPE, VOID_TYPE))
3342 return NULL_RTX;
3343 else
3345 tree dest = CALL_EXPR_ARG (exp, 0);
3346 tree src = CALL_EXPR_ARG (exp, 2);
3347 tree len = CALL_EXPR_ARG (exp, 4);
3348 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3349 /*end_p=*/ 0);
3351 /* Return src bounds with the result. */
3352 if (res)
3354 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3355 expand_normal (CALL_EXPR_ARG (exp, 1)));
3356 res = chkp_join_splitted_slot (res, bnd);
3358 return res;
3362 /* Expand a call EXP to the mempcpy builtin.
3363 Return NULL_RTX if we failed; the caller should emit a normal call,
3364 otherwise try to get the result in TARGET, if convenient (and in
3365 mode MODE if that's convenient). If ENDP is 0 return the
3366 destination pointer, if ENDP is 1 return the end pointer ala
3367 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3368 stpcpy. */
3370 static rtx
3371 expand_builtin_mempcpy (tree exp, rtx target)
3373 if (!validate_arglist (exp,
3374 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3375 return NULL_RTX;
3377 tree dest = CALL_EXPR_ARG (exp, 0);
3378 tree src = CALL_EXPR_ARG (exp, 1);
3379 tree len = CALL_EXPR_ARG (exp, 2);
3381 /* Avoid expanding mempcpy into memcpy when the call is determined
3382 to overflow the buffer. This also prevents the same overflow
3383 from being diagnosed again when expanding memcpy. */
3384 if (!check_memop_sizes (exp, dest, src, len))
3385 return NULL_RTX;
3387 return expand_builtin_mempcpy_args (dest, src, len,
3388 target, exp, /*endp=*/ 1);
3391 /* Expand an instrumented call EXP to the mempcpy builtin.
3392 Return NULL_RTX if we failed, the caller should emit a normal call,
3393 otherwise try to get the result in TARGET, if convenient (and in
3394 mode MODE if that's convenient). */
3396 static rtx
3397 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3399 if (!validate_arglist (exp,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3402 INTEGER_TYPE, VOID_TYPE))
3403 return NULL_RTX;
3404 else
3406 tree dest = CALL_EXPR_ARG (exp, 0);
3407 tree src = CALL_EXPR_ARG (exp, 2);
3408 tree len = CALL_EXPR_ARG (exp, 4);
3409 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3410 exp, 1);
3412 /* Return src bounds with the result. */
3413 if (res)
3415 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3416 expand_normal (CALL_EXPR_ARG (exp, 1)));
3417 res = chkp_join_splitted_slot (res, bnd);
3419 return res;
3423 /* Helper function to do the actual work for expand of memory copy family
3424 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3425 of memory from SRC to DEST and assign to TARGET if convenient.
3426 If ENDP is 0 return the
3427 destination pointer, if ENDP is 1 return the end pointer ala
3428 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3429 stpcpy. */
3431 static rtx
3432 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3433 rtx target, tree exp, int endp)
3435 const char *src_str;
3436 unsigned int src_align = get_pointer_alignment (src);
3437 unsigned int dest_align = get_pointer_alignment (dest);
3438 rtx dest_mem, src_mem, dest_addr, len_rtx;
3439 HOST_WIDE_INT expected_size = -1;
3440 unsigned int expected_align = 0;
3441 unsigned HOST_WIDE_INT min_size;
3442 unsigned HOST_WIDE_INT max_size;
3443 unsigned HOST_WIDE_INT probable_max_size;
3445 /* If DEST is not a pointer type, call the normal function. */
3446 if (dest_align == 0)
3447 return NULL_RTX;
3449 /* If either SRC is not a pointer type, don't do this
3450 operation in-line. */
3451 if (src_align == 0)
3452 return NULL_RTX;
3454 if (currently_expanding_gimple_stmt)
3455 stringop_block_profile (currently_expanding_gimple_stmt,
3456 &expected_align, &expected_size);
3458 if (expected_align < dest_align)
3459 expected_align = dest_align;
3460 dest_mem = get_memory_rtx (dest, len);
3461 set_mem_align (dest_mem, dest_align);
3462 len_rtx = expand_normal (len);
3463 determine_block_size (len, len_rtx, &min_size, &max_size,
3464 &probable_max_size);
3465 src_str = c_getstr (src);
3467 /* If SRC is a string constant and block move would be done
3468 by pieces, we can avoid loading the string from memory
3469 and only stored the computed constants. */
3470 if (src_str
3471 && CONST_INT_P (len_rtx)
3472 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3473 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3474 CONST_CAST (char *, src_str),
3475 dest_align, false))
3477 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3478 builtin_memcpy_read_str,
3479 CONST_CAST (char *, src_str),
3480 dest_align, false, endp);
3481 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3482 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3483 return dest_mem;
3486 src_mem = get_memory_rtx (src, len);
3487 set_mem_align (src_mem, src_align);
3489 /* Copy word part most expediently. */
3490 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3491 CALL_EXPR_TAILCALL (exp)
3492 && (endp == 0 || target == const0_rtx)
3493 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3494 expected_align, expected_size,
3495 min_size, max_size, probable_max_size);
3497 if (dest_addr == 0)
3499 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3500 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3503 if (endp && target != const0_rtx)
3505 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3506 /* stpcpy pointer to last byte. */
3507 if (endp == 2)
3508 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3511 return dest_addr;
3514 static rtx
3515 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3516 rtx target, tree orig_exp, int endp)
3518 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3519 endp);
3522 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3523 we failed, the caller should emit a normal call, otherwise try to
3524 get the result in TARGET, if convenient. If ENDP is 0 return the
3525 destination pointer, if ENDP is 1 return the end pointer ala
3526 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3527 stpcpy. */
3529 static rtx
3530 expand_movstr (tree dest, tree src, rtx target, int endp)
3532 struct expand_operand ops[3];
3533 rtx dest_mem;
3534 rtx src_mem;
3536 if (!targetm.have_movstr ())
3537 return NULL_RTX;
3539 dest_mem = get_memory_rtx (dest, NULL);
3540 src_mem = get_memory_rtx (src, NULL);
3541 if (!endp)
3543 target = force_reg (Pmode, XEXP (dest_mem, 0));
3544 dest_mem = replace_equiv_address (dest_mem, target);
3547 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3548 create_fixed_operand (&ops[1], dest_mem);
3549 create_fixed_operand (&ops[2], src_mem);
3550 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3551 return NULL_RTX;
3553 if (endp && target != const0_rtx)
3555 target = ops[0].value;
3556 /* movstr is supposed to set end to the address of the NUL
3557 terminator. If the caller requested a mempcpy-like return value,
3558 adjust it. */
3559 if (endp == 1)
3561 rtx tem = plus_constant (GET_MODE (target),
3562 gen_lowpart (GET_MODE (target), target), 1);
3563 emit_move_insn (target, force_operand (tem, NULL_RTX));
3566 return target;
3569 /* Do some very basic size validation of a call to the strcpy builtin
3570 given by EXP. Return NULL_RTX to have the built-in expand to a call
3571 to the library function. */
3573 static rtx
3574 expand_builtin_strcat (tree exp, rtx)
3576 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3577 || !warn_stringop_overflow)
3578 return NULL_RTX;
3580 tree dest = CALL_EXPR_ARG (exp, 0);
3581 tree src = CALL_EXPR_ARG (exp, 1);
3583 /* There is no way here to determine the length of the string in
3584 the destination to which the SRC string is being appended so
3585 just diagnose cases when the souce string is longer than
3586 the destination object. */
3588 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3590 check_sizes (OPT_Wstringop_overflow_,
3591 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3593 return NULL_RTX;
3596 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3597 NULL_RTX if we failed the caller should emit a normal call, otherwise
3598 try to get the result in TARGET, if convenient (and in mode MODE if that's
3599 convenient). */
3601 static rtx
3602 expand_builtin_strcpy (tree exp, rtx target)
3604 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3605 return NULL_RTX;
3607 tree dest = CALL_EXPR_ARG (exp, 0);
3608 tree src = CALL_EXPR_ARG (exp, 1);
3610 if (warn_stringop_overflow)
3612 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3613 check_sizes (OPT_Wstringop_overflow_,
3614 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3617 return expand_builtin_strcpy_args (dest, src, target);
3620 /* Helper function to do the actual work for expand_builtin_strcpy. The
3621 arguments to the builtin_strcpy call DEST and SRC are broken out
3622 so that this can also be called without constructing an actual CALL_EXPR.
3623 The other arguments and return value are the same as for
3624 expand_builtin_strcpy. */
3626 static rtx
3627 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3629 return expand_movstr (dest, src, target, /*endp=*/0);
3632 /* Expand a call EXP to the stpcpy builtin.
3633 Return NULL_RTX if we failed the caller should emit a normal call,
3634 otherwise try to get the result in TARGET, if convenient (and in
3635 mode MODE if that's convenient). */
3637 static rtx
3638 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3640 tree dst, src;
3641 location_t loc = EXPR_LOCATION (exp);
3643 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3644 return NULL_RTX;
3646 dst = CALL_EXPR_ARG (exp, 0);
3647 src = CALL_EXPR_ARG (exp, 1);
3649 if (warn_stringop_overflow)
3651 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3652 check_sizes (OPT_Wstringop_overflow_,
3653 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3656 /* If return value is ignored, transform stpcpy into strcpy. */
3657 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3659 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3660 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3661 return expand_expr (result, target, mode, EXPAND_NORMAL);
3663 else
3665 tree len, lenp1;
3666 rtx ret;
3668 /* Ensure we get an actual string whose length can be evaluated at
3669 compile-time, not an expression containing a string. This is
3670 because the latter will potentially produce pessimized code
3671 when used to produce the return value. */
3672 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3673 return expand_movstr (dst, src, target, /*endp=*/2);
3675 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3676 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3677 target, exp, /*endp=*/2);
3679 if (ret)
3680 return ret;
3682 if (TREE_CODE (len) == INTEGER_CST)
3684 rtx len_rtx = expand_normal (len);
3686 if (CONST_INT_P (len_rtx))
3688 ret = expand_builtin_strcpy_args (dst, src, target);
3690 if (ret)
3692 if (! target)
3694 if (mode != VOIDmode)
3695 target = gen_reg_rtx (mode);
3696 else
3697 target = gen_reg_rtx (GET_MODE (ret));
3699 if (GET_MODE (target) != GET_MODE (ret))
3700 ret = gen_lowpart (GET_MODE (target), ret);
3702 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3703 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3704 gcc_assert (ret);
3706 return target;
3711 return expand_movstr (dst, src, target, /*endp=*/2);
3715 /* Check a call EXP to the stpncpy built-in for validity.
3716 Return NULL_RTX on both success and failure. */
3718 static rtx
3719 expand_builtin_stpncpy (tree exp, rtx)
3721 if (!validate_arglist (exp,
3722 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3723 || !warn_stringop_overflow)
3724 return NULL_RTX;
3726 /* The source and destination of the call. */
3727 tree dest = CALL_EXPR_ARG (exp, 0);
3728 tree src = CALL_EXPR_ARG (exp, 1);
3730 /* The exact number of bytes to write (not the maximum). */
3731 tree len = CALL_EXPR_ARG (exp, 2);
3733 /* The size of the destination object. */
3734 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3736 check_sizes (OPT_Wstringop_overflow_,
3737 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3739 return NULL_RTX;
3742 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3743 bytes from constant string DATA + OFFSET and return it as target
3744 constant. */
3747 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3748 scalar_int_mode mode)
3750 const char *str = (const char *) data;
3752 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3753 return const0_rtx;
3755 return c_readstr (str + offset, mode);
3758 /* Helper to check the sizes of sequences and the destination of calls
3759 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3760 success (no overflow or invalid sizes), false otherwise. */
3762 static bool
3763 check_strncat_sizes (tree exp, tree objsize)
3765 tree dest = CALL_EXPR_ARG (exp, 0);
3766 tree src = CALL_EXPR_ARG (exp, 1);
3767 tree maxlen = CALL_EXPR_ARG (exp, 2);
3769 /* Try to determine the range of lengths that the source expression
3770 refers to. */
3771 tree lenrange[2];
3772 get_range_strlen (src, lenrange);
3774 /* Try to verify that the destination is big enough for the shortest
3775 string. */
3777 if (!objsize && warn_stringop_overflow)
3779 /* If it hasn't been provided by __strncat_chk, try to determine
3780 the size of the destination object into which the source is
3781 being copied. */
3782 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3785 /* Add one for the terminating nul. */
3786 tree srclen = (lenrange[0]
3787 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3788 size_one_node)
3789 : NULL_TREE);
3791 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3792 nul so the specified upper bound should never be equal to (or greater
3793 than) the size of the destination. */
3794 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3795 && tree_int_cst_equal (objsize, maxlen))
3797 location_t loc = tree_nonartificial_location (exp);
3798 loc = expansion_point_location_if_in_system_header (loc);
3800 warning_at (loc, OPT_Wstringop_overflow_,
3801 "%K%qD specified bound %E equals destination size",
3802 exp, get_callee_fndecl (exp), maxlen);
3804 return false;
3807 if (!srclen
3808 || (maxlen && tree_fits_uhwi_p (maxlen)
3809 && tree_fits_uhwi_p (srclen)
3810 && tree_int_cst_lt (maxlen, srclen)))
3811 srclen = maxlen;
3813 /* The number of bytes to write is LEN but check_sizes will also
3814 check SRCLEN if LEN's value isn't known. */
3815 return check_sizes (OPT_Wstringop_overflow_,
3816 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3819 /* Similar to expand_builtin_strcat, do some very basic size validation
3820 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3821 the built-in expand to a call to the library function. */
3823 static rtx
3824 expand_builtin_strncat (tree exp, rtx)
3826 if (!validate_arglist (exp,
3827 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3828 || !warn_stringop_overflow)
3829 return NULL_RTX;
3831 tree dest = CALL_EXPR_ARG (exp, 0);
3832 tree src = CALL_EXPR_ARG (exp, 1);
3833 /* The upper bound on the number of bytes to write. */
3834 tree maxlen = CALL_EXPR_ARG (exp, 2);
3835 /* The length of the source sequence. */
3836 tree slen = c_strlen (src, 1);
3838 /* Try to determine the range of lengths that the source expression
3839 refers to. */
3840 tree lenrange[2];
3841 if (slen)
3842 lenrange[0] = lenrange[1] = slen;
3843 else
3844 get_range_strlen (src, lenrange);
3846 /* Try to verify that the destination is big enough for the shortest
3847 string. First try to determine the size of the destination object
3848 into which the source is being copied. */
3849 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3851 /* Add one for the terminating nul. */
3852 tree srclen = (lenrange[0]
3853 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3854 size_one_node)
3855 : NULL_TREE);
3857 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3858 nul so the specified upper bound should never be equal to (or greater
3859 than) the size of the destination. */
3860 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3861 && tree_int_cst_equal (destsize, maxlen))
3863 location_t loc = tree_nonartificial_location (exp);
3864 loc = expansion_point_location_if_in_system_header (loc);
3866 warning_at (loc, OPT_Wstringop_overflow_,
3867 "%K%qD specified bound %E equals destination size",
3868 exp, get_callee_fndecl (exp), maxlen);
3870 return NULL_RTX;
3873 if (!srclen
3874 || (maxlen && tree_fits_uhwi_p (maxlen)
3875 && tree_fits_uhwi_p (srclen)
3876 && tree_int_cst_lt (maxlen, srclen)))
3877 srclen = maxlen;
3879 /* The number of bytes to write is LEN but check_sizes will also
3880 check SRCLEN if LEN's value isn't known. */
3881 check_sizes (OPT_Wstringop_overflow_,
3882 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3884 return NULL_RTX;
3887 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3888 NULL_RTX if we failed the caller should emit a normal call. */
3890 static rtx
3891 expand_builtin_strncpy (tree exp, rtx target)
3893 location_t loc = EXPR_LOCATION (exp);
3895 if (validate_arglist (exp,
3896 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898 tree dest = CALL_EXPR_ARG (exp, 0);
3899 tree src = CALL_EXPR_ARG (exp, 1);
3900 /* The number of bytes to write (not the maximum). */
3901 tree len = CALL_EXPR_ARG (exp, 2);
3902 /* The length of the source sequence. */
3903 tree slen = c_strlen (src, 1);
3905 if (warn_stringop_overflow)
3907 tree destsize = compute_objsize (dest,
3908 warn_stringop_overflow - 1);
3910 /* The number of bytes to write is LEN but check_sizes will also
3911 check SLEN if LEN's value isn't known. */
3912 check_sizes (OPT_Wstringop_overflow_,
3913 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
3916 /* We must be passed a constant len and src parameter. */
3917 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3918 return NULL_RTX;
3920 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3922 /* We're required to pad with trailing zeros if the requested
3923 len is greater than strlen(s2)+1. In that case try to
3924 use store_by_pieces, if it fails, punt. */
3925 if (tree_int_cst_lt (slen, len))
3927 unsigned int dest_align = get_pointer_alignment (dest);
3928 const char *p = c_getstr (src);
3929 rtx dest_mem;
3931 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3932 || !can_store_by_pieces (tree_to_uhwi (len),
3933 builtin_strncpy_read_str,
3934 CONST_CAST (char *, p),
3935 dest_align, false))
3936 return NULL_RTX;
3938 dest_mem = get_memory_rtx (dest, len);
3939 store_by_pieces (dest_mem, tree_to_uhwi (len),
3940 builtin_strncpy_read_str,
3941 CONST_CAST (char *, p), dest_align, false, 0);
3942 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3943 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3944 return dest_mem;
3947 return NULL_RTX;
3950 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3951 bytes from constant string DATA + OFFSET and return it as target
3952 constant. */
3955 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3956 scalar_int_mode mode)
3958 const char *c = (const char *) data;
3959 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3961 memset (p, *c, GET_MODE_SIZE (mode));
3963 return c_readstr (p, mode);
3966 /* Callback routine for store_by_pieces. Return the RTL of a register
3967 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3968 char value given in the RTL register data. For example, if mode is
3969 4 bytes wide, return the RTL for 0x01010101*data. */
3971 static rtx
3972 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3973 scalar_int_mode mode)
3975 rtx target, coeff;
3976 size_t size;
3977 char *p;
3979 size = GET_MODE_SIZE (mode);
3980 if (size == 1)
3981 return (rtx) data;
3983 p = XALLOCAVEC (char, size);
3984 memset (p, 1, size);
3985 coeff = c_readstr (p, mode);
3987 target = convert_to_mode (mode, (rtx) data, 1);
3988 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3989 return force_reg (mode, target);
3992 /* Expand expression EXP, which is a call to the memset builtin. Return
3993 NULL_RTX if we failed the caller should emit a normal call, otherwise
3994 try to get the result in TARGET, if convenient (and in mode MODE if that's
3995 convenient). */
3997 static rtx
3998 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4000 if (!validate_arglist (exp,
4001 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4002 return NULL_RTX;
4004 tree dest = CALL_EXPR_ARG (exp, 0);
4005 tree val = CALL_EXPR_ARG (exp, 1);
4006 tree len = CALL_EXPR_ARG (exp, 2);
4008 check_memop_sizes (exp, dest, NULL_TREE, len);
4010 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4013 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4014 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4015 try to get the result in TARGET, if convenient (and in mode MODE if that's
4016 convenient). */
4018 static rtx
4019 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4021 if (!validate_arglist (exp,
4022 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4023 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4024 return NULL_RTX;
4025 else
4027 tree dest = CALL_EXPR_ARG (exp, 0);
4028 tree val = CALL_EXPR_ARG (exp, 2);
4029 tree len = CALL_EXPR_ARG (exp, 3);
4030 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4032 /* Return src bounds with the result. */
4033 if (res)
4035 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4036 expand_normal (CALL_EXPR_ARG (exp, 1)));
4037 res = chkp_join_splitted_slot (res, bnd);
4039 return res;
4043 /* Helper function to do the actual work for expand_builtin_memset. The
4044 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4045 so that this can also be called without constructing an actual CALL_EXPR.
4046 The other arguments and return value are the same as for
4047 expand_builtin_memset. */
4049 static rtx
4050 expand_builtin_memset_args (tree dest, tree val, tree len,
4051 rtx target, machine_mode mode, tree orig_exp)
4053 tree fndecl, fn;
4054 enum built_in_function fcode;
4055 machine_mode val_mode;
4056 char c;
4057 unsigned int dest_align;
4058 rtx dest_mem, dest_addr, len_rtx;
4059 HOST_WIDE_INT expected_size = -1;
4060 unsigned int expected_align = 0;
4061 unsigned HOST_WIDE_INT min_size;
4062 unsigned HOST_WIDE_INT max_size;
4063 unsigned HOST_WIDE_INT probable_max_size;
4065 dest_align = get_pointer_alignment (dest);
4067 /* If DEST is not a pointer type, don't do this operation in-line. */
4068 if (dest_align == 0)
4069 return NULL_RTX;
4071 if (currently_expanding_gimple_stmt)
4072 stringop_block_profile (currently_expanding_gimple_stmt,
4073 &expected_align, &expected_size);
4075 if (expected_align < dest_align)
4076 expected_align = dest_align;
4078 /* If the LEN parameter is zero, return DEST. */
4079 if (integer_zerop (len))
4081 /* Evaluate and ignore VAL in case it has side-effects. */
4082 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4083 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4086 /* Stabilize the arguments in case we fail. */
4087 dest = builtin_save_expr (dest);
4088 val = builtin_save_expr (val);
4089 len = builtin_save_expr (len);
4091 len_rtx = expand_normal (len);
4092 determine_block_size (len, len_rtx, &min_size, &max_size,
4093 &probable_max_size);
4094 dest_mem = get_memory_rtx (dest, len);
4095 val_mode = TYPE_MODE (unsigned_char_type_node);
4097 if (TREE_CODE (val) != INTEGER_CST)
4099 rtx val_rtx;
4101 val_rtx = expand_normal (val);
4102 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4104 /* Assume that we can memset by pieces if we can store
4105 * the coefficients by pieces (in the required modes).
4106 * We can't pass builtin_memset_gen_str as that emits RTL. */
4107 c = 1;
4108 if (tree_fits_uhwi_p (len)
4109 && can_store_by_pieces (tree_to_uhwi (len),
4110 builtin_memset_read_str, &c, dest_align,
4111 true))
4113 val_rtx = force_reg (val_mode, val_rtx);
4114 store_by_pieces (dest_mem, tree_to_uhwi (len),
4115 builtin_memset_gen_str, val_rtx, dest_align,
4116 true, 0);
4118 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4119 dest_align, expected_align,
4120 expected_size, min_size, max_size,
4121 probable_max_size))
4122 goto do_libcall;
4124 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4125 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4126 return dest_mem;
4129 if (target_char_cast (val, &c))
4130 goto do_libcall;
4132 if (c)
4134 if (tree_fits_uhwi_p (len)
4135 && can_store_by_pieces (tree_to_uhwi (len),
4136 builtin_memset_read_str, &c, dest_align,
4137 true))
4138 store_by_pieces (dest_mem, tree_to_uhwi (len),
4139 builtin_memset_read_str, &c, dest_align, true, 0);
4140 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4141 gen_int_mode (c, val_mode),
4142 dest_align, expected_align,
4143 expected_size, min_size, max_size,
4144 probable_max_size))
4145 goto do_libcall;
4147 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4148 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4149 return dest_mem;
4152 set_mem_align (dest_mem, dest_align);
4153 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4154 CALL_EXPR_TAILCALL (orig_exp)
4155 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4156 expected_align, expected_size,
4157 min_size, max_size,
4158 probable_max_size);
4160 if (dest_addr == 0)
4162 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4163 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4166 return dest_addr;
4168 do_libcall:
4169 fndecl = get_callee_fndecl (orig_exp);
4170 fcode = DECL_FUNCTION_CODE (fndecl);
4171 if (fcode == BUILT_IN_MEMSET
4172 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4173 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4174 dest, val, len);
4175 else if (fcode == BUILT_IN_BZERO)
4176 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4177 dest, len);
4178 else
4179 gcc_unreachable ();
4180 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4181 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4182 return expand_call (fn, target, target == const0_rtx);
4185 /* Expand expression EXP, which is a call to the bzero builtin. Return
4186 NULL_RTX if we failed the caller should emit a normal call. */
4188 static rtx
4189 expand_builtin_bzero (tree exp)
4191 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4192 return NULL_RTX;
4194 tree dest = CALL_EXPR_ARG (exp, 0);
4195 tree size = CALL_EXPR_ARG (exp, 1);
4197 check_memop_sizes (exp, dest, NULL_TREE, size);
4199 /* New argument list transforming bzero(ptr x, int y) to
4200 memset(ptr x, int 0, size_t y). This is done this way
4201 so that if it isn't expanded inline, we fallback to
4202 calling bzero instead of memset. */
4204 location_t loc = EXPR_LOCATION (exp);
4206 return expand_builtin_memset_args (dest, integer_zero_node,
4207 fold_convert_loc (loc,
4208 size_type_node, size),
4209 const0_rtx, VOIDmode, exp);
4212 /* Try to expand cmpstr operation ICODE with the given operands.
4213 Return the result rtx on success, otherwise return null. */
4215 static rtx
4216 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4217 HOST_WIDE_INT align)
4219 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4221 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4222 target = NULL_RTX;
4224 struct expand_operand ops[4];
4225 create_output_operand (&ops[0], target, insn_mode);
4226 create_fixed_operand (&ops[1], arg1_rtx);
4227 create_fixed_operand (&ops[2], arg2_rtx);
4228 create_integer_operand (&ops[3], align);
4229 if (maybe_expand_insn (icode, 4, ops))
4230 return ops[0].value;
4231 return NULL_RTX;
4234 /* Expand expression EXP, which is a call to the memcmp built-in function.
4235 Return NULL_RTX if we failed and the caller should emit a normal call,
4236 otherwise try to get the result in TARGET, if convenient.
4237 RESULT_EQ is true if we can relax the returned value to be either zero
4238 or nonzero, without caring about the sign. */
4240 static rtx
4241 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4243 if (!validate_arglist (exp,
4244 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4245 return NULL_RTX;
4247 tree arg1 = CALL_EXPR_ARG (exp, 0);
4248 tree arg2 = CALL_EXPR_ARG (exp, 1);
4249 tree len = CALL_EXPR_ARG (exp, 2);
4251 /* Diagnose calls where the specified length exceeds the size of either
4252 object. */
4253 if (warn_stringop_overflow)
4255 tree size = compute_objsize (arg1, 0);
4256 if (check_sizes (OPT_Wstringop_overflow_,
4257 exp, len, /*maxlen=*/NULL_TREE,
4258 size, /*objsize=*/NULL_TREE))
4260 size = compute_objsize (arg2, 0);
4261 check_sizes (OPT_Wstringop_overflow_,
4262 exp, len, /*maxlen=*/NULL_TREE,
4263 size, /*objsize=*/NULL_TREE);
4267 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4268 location_t loc = EXPR_LOCATION (exp);
4270 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4271 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4273 /* If we don't have POINTER_TYPE, call the function. */
4274 if (arg1_align == 0 || arg2_align == 0)
4275 return NULL_RTX;
4277 rtx arg1_rtx = get_memory_rtx (arg1, len);
4278 rtx arg2_rtx = get_memory_rtx (arg2, len);
4279 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4281 /* Set MEM_SIZE as appropriate. */
4282 if (CONST_INT_P (len_rtx))
4284 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4285 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4288 by_pieces_constfn constfn = NULL;
4290 const char *src_str = c_getstr (arg2);
4291 if (result_eq && src_str == NULL)
4293 src_str = c_getstr (arg1);
4294 if (src_str != NULL)
4295 std::swap (arg1_rtx, arg2_rtx);
4298 /* If SRC is a string constant and block move would be done
4299 by pieces, we can avoid loading the string from memory
4300 and only stored the computed constants. */
4301 if (src_str
4302 && CONST_INT_P (len_rtx)
4303 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4304 constfn = builtin_memcpy_read_str;
4306 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4307 TREE_TYPE (len), target,
4308 result_eq, constfn,
4309 CONST_CAST (char *, src_str));
4311 if (result)
4313 /* Return the value in the proper mode for this function. */
4314 if (GET_MODE (result) == mode)
4315 return result;
4317 if (target != 0)
4319 convert_move (target, result, 0);
4320 return target;
4323 return convert_to_mode (mode, result, 0);
4326 return NULL_RTX;
4329 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4330 if we failed the caller should emit a normal call, otherwise try to get
4331 the result in TARGET, if convenient. */
4333 static rtx
4334 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4336 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4337 return NULL_RTX;
4339 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4340 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4341 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
4343 rtx arg1_rtx, arg2_rtx;
4344 tree fndecl, fn;
4345 tree arg1 = CALL_EXPR_ARG (exp, 0);
4346 tree arg2 = CALL_EXPR_ARG (exp, 1);
4347 rtx result = NULL_RTX;
4349 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4350 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4352 /* If we don't have POINTER_TYPE, call the function. */
4353 if (arg1_align == 0 || arg2_align == 0)
4354 return NULL_RTX;
4356 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4357 arg1 = builtin_save_expr (arg1);
4358 arg2 = builtin_save_expr (arg2);
4360 arg1_rtx = get_memory_rtx (arg1, NULL);
4361 arg2_rtx = get_memory_rtx (arg2, NULL);
4363 /* Try to call cmpstrsi. */
4364 if (cmpstr_icode != CODE_FOR_nothing)
4365 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4366 MIN (arg1_align, arg2_align));
4368 /* Try to determine at least one length and call cmpstrnsi. */
4369 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4371 tree len;
4372 rtx arg3_rtx;
4374 tree len1 = c_strlen (arg1, 1);
4375 tree len2 = c_strlen (arg2, 1);
4377 if (len1)
4378 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4379 if (len2)
4380 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4382 /* If we don't have a constant length for the first, use the length
4383 of the second, if we know it. We don't require a constant for
4384 this case; some cost analysis could be done if both are available
4385 but neither is constant. For now, assume they're equally cheap,
4386 unless one has side effects. If both strings have constant lengths,
4387 use the smaller. */
4389 if (!len1)
4390 len = len2;
4391 else if (!len2)
4392 len = len1;
4393 else if (TREE_SIDE_EFFECTS (len1))
4394 len = len2;
4395 else if (TREE_SIDE_EFFECTS (len2))
4396 len = len1;
4397 else if (TREE_CODE (len1) != INTEGER_CST)
4398 len = len2;
4399 else if (TREE_CODE (len2) != INTEGER_CST)
4400 len = len1;
4401 else if (tree_int_cst_lt (len1, len2))
4402 len = len1;
4403 else
4404 len = len2;
4406 /* If both arguments have side effects, we cannot optimize. */
4407 if (len && !TREE_SIDE_EFFECTS (len))
4409 arg3_rtx = expand_normal (len);
4410 result = expand_cmpstrn_or_cmpmem
4411 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4412 arg3_rtx, MIN (arg1_align, arg2_align));
4416 if (result)
4418 /* Return the value in the proper mode for this function. */
4419 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4420 if (GET_MODE (result) == mode)
4421 return result;
4422 if (target == 0)
4423 return convert_to_mode (mode, result, 0);
4424 convert_move (target, result, 0);
4425 return target;
4428 /* Expand the library call ourselves using a stabilized argument
4429 list to avoid re-evaluating the function's arguments twice. */
4430 fndecl = get_callee_fndecl (exp);
4431 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4432 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4433 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4434 return expand_call (fn, target, target == const0_rtx);
4436 return NULL_RTX;
4439 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4440 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
4441 the result in TARGET, if convenient. */
4443 static rtx
4444 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4445 ATTRIBUTE_UNUSED machine_mode mode)
4447 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4449 if (!validate_arglist (exp,
4450 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4451 return NULL_RTX;
4453 /* If c_strlen can determine an expression for one of the string
4454 lengths, and it doesn't have side effects, then emit cmpstrnsi
4455 using length MIN(strlen(string)+1, arg3). */
4456 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4457 if (cmpstrn_icode != CODE_FOR_nothing)
4459 tree len, len1, len2, len3;
4460 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4461 rtx result;
4462 tree fndecl, fn;
4463 tree arg1 = CALL_EXPR_ARG (exp, 0);
4464 tree arg2 = CALL_EXPR_ARG (exp, 1);
4465 tree arg3 = CALL_EXPR_ARG (exp, 2);
4467 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4468 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4470 len1 = c_strlen (arg1, 1);
4471 len2 = c_strlen (arg2, 1);
4473 if (len1)
4474 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4475 if (len2)
4476 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4478 len3 = fold_convert_loc (loc, sizetype, arg3);
4480 /* If we don't have a constant length for the first, use the length
4481 of the second, if we know it. If neither string is constant length,
4482 use the given length argument. We don't require a constant for
4483 this case; some cost analysis could be done if both are available
4484 but neither is constant. For now, assume they're equally cheap,
4485 unless one has side effects. If both strings have constant lengths,
4486 use the smaller. */
4488 if (!len1 && !len2)
4489 len = len3;
4490 else if (!len1)
4491 len = len2;
4492 else if (!len2)
4493 len = len1;
4494 else if (TREE_SIDE_EFFECTS (len1))
4495 len = len2;
4496 else if (TREE_SIDE_EFFECTS (len2))
4497 len = len1;
4498 else if (TREE_CODE (len1) != INTEGER_CST)
4499 len = len2;
4500 else if (TREE_CODE (len2) != INTEGER_CST)
4501 len = len1;
4502 else if (tree_int_cst_lt (len1, len2))
4503 len = len1;
4504 else
4505 len = len2;
4507 /* If we are not using the given length, we must incorporate it here.
4508 The actual new length parameter will be MIN(len,arg3) in this case. */
4509 if (len != len3)
4510 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4511 arg1_rtx = get_memory_rtx (arg1, len);
4512 arg2_rtx = get_memory_rtx (arg2, len);
4513 arg3_rtx = expand_normal (len);
4514 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4515 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4516 MIN (arg1_align, arg2_align));
4517 if (result)
4519 /* Return the value in the proper mode for this function. */
4520 mode = TYPE_MODE (TREE_TYPE (exp));
4521 if (GET_MODE (result) == mode)
4522 return result;
4523 if (target == 0)
4524 return convert_to_mode (mode, result, 0);
4525 convert_move (target, result, 0);
4526 return target;
4529 /* Expand the library call ourselves using a stabilized argument
4530 list to avoid re-evaluating the function's arguments twice. */
4531 fndecl = get_callee_fndecl (exp);
4532 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4533 arg1, arg2, len);
4534 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4535 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4536 return expand_call (fn, target, target == const0_rtx);
4538 return NULL_RTX;
4541 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4542 if that's convenient. */
4545 expand_builtin_saveregs (void)
4547 rtx val;
4548 rtx_insn *seq;
4550 /* Don't do __builtin_saveregs more than once in a function.
4551 Save the result of the first call and reuse it. */
4552 if (saveregs_value != 0)
4553 return saveregs_value;
4555 /* When this function is called, it means that registers must be
4556 saved on entry to this function. So we migrate the call to the
4557 first insn of this function. */
4559 start_sequence ();
4561 /* Do whatever the machine needs done in this case. */
4562 val = targetm.calls.expand_builtin_saveregs ();
4564 seq = get_insns ();
4565 end_sequence ();
4567 saveregs_value = val;
4569 /* Put the insns after the NOTE that starts the function. If this
4570 is inside a start_sequence, make the outer-level insn chain current, so
4571 the code is placed at the start of the function. */
4572 push_topmost_sequence ();
4573 emit_insn_after (seq, entry_of_function ());
4574 pop_topmost_sequence ();
4576 return val;
4579 /* Expand a call to __builtin_next_arg. */
4581 static rtx
4582 expand_builtin_next_arg (void)
4584 /* Checking arguments is already done in fold_builtin_next_arg
4585 that must be called before this function. */
4586 return expand_binop (ptr_mode, add_optab,
4587 crtl->args.internal_arg_pointer,
4588 crtl->args.arg_offset_rtx,
4589 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4592 /* Make it easier for the backends by protecting the valist argument
4593 from multiple evaluations. */
4595 static tree
4596 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4598 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4600 /* The current way of determining the type of valist is completely
4601 bogus. We should have the information on the va builtin instead. */
4602 if (!vatype)
4603 vatype = targetm.fn_abi_va_list (cfun->decl);
4605 if (TREE_CODE (vatype) == ARRAY_TYPE)
4607 if (TREE_SIDE_EFFECTS (valist))
4608 valist = save_expr (valist);
4610 /* For this case, the backends will be expecting a pointer to
4611 vatype, but it's possible we've actually been given an array
4612 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4613 So fix it. */
4614 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4616 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4617 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4620 else
4622 tree pt = build_pointer_type (vatype);
4624 if (! needs_lvalue)
4626 if (! TREE_SIDE_EFFECTS (valist))
4627 return valist;
4629 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4630 TREE_SIDE_EFFECTS (valist) = 1;
4633 if (TREE_SIDE_EFFECTS (valist))
4634 valist = save_expr (valist);
4635 valist = fold_build2_loc (loc, MEM_REF,
4636 vatype, valist, build_int_cst (pt, 0));
4639 return valist;
4642 /* The "standard" definition of va_list is void*. */
4644 tree
4645 std_build_builtin_va_list (void)
4647 return ptr_type_node;
4650 /* The "standard" abi va_list is va_list_type_node. */
4652 tree
4653 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4655 return va_list_type_node;
4658 /* The "standard" type of va_list is va_list_type_node. */
4660 tree
4661 std_canonical_va_list_type (tree type)
4663 tree wtype, htype;
4665 wtype = va_list_type_node;
4666 htype = type;
4668 if (TREE_CODE (wtype) == ARRAY_TYPE)
4670 /* If va_list is an array type, the argument may have decayed
4671 to a pointer type, e.g. by being passed to another function.
4672 In that case, unwrap both types so that we can compare the
4673 underlying records. */
4674 if (TREE_CODE (htype) == ARRAY_TYPE
4675 || POINTER_TYPE_P (htype))
4677 wtype = TREE_TYPE (wtype);
4678 htype = TREE_TYPE (htype);
4681 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4682 return va_list_type_node;
4684 return NULL_TREE;
4687 /* The "standard" implementation of va_start: just assign `nextarg' to
4688 the variable. */
4690 void
4691 std_expand_builtin_va_start (tree valist, rtx nextarg)
4693 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4694 convert_move (va_r, nextarg, 0);
4696 /* We do not have any valid bounds for the pointer, so
4697 just store zero bounds for it. */
4698 if (chkp_function_instrumented_p (current_function_decl))
4699 chkp_expand_bounds_reset_for_mem (valist,
4700 make_tree (TREE_TYPE (valist),
4701 nextarg));
4704 /* Expand EXP, a call to __builtin_va_start. */
4706 static rtx
4707 expand_builtin_va_start (tree exp)
4709 rtx nextarg;
4710 tree valist;
4711 location_t loc = EXPR_LOCATION (exp);
4713 if (call_expr_nargs (exp) < 2)
4715 error_at (loc, "too few arguments to function %<va_start%>");
4716 return const0_rtx;
4719 if (fold_builtin_next_arg (exp, true))
4720 return const0_rtx;
4722 nextarg = expand_builtin_next_arg ();
4723 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4725 if (targetm.expand_builtin_va_start)
4726 targetm.expand_builtin_va_start (valist, nextarg);
4727 else
4728 std_expand_builtin_va_start (valist, nextarg);
4730 return const0_rtx;
4733 /* Expand EXP, a call to __builtin_va_end. */
4735 static rtx
4736 expand_builtin_va_end (tree exp)
4738 tree valist = CALL_EXPR_ARG (exp, 0);
4740 /* Evaluate for side effects, if needed. I hate macros that don't
4741 do that. */
4742 if (TREE_SIDE_EFFECTS (valist))
4743 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4745 return const0_rtx;
4748 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4749 builtin rather than just as an assignment in stdarg.h because of the
4750 nastiness of array-type va_list types. */
4752 static rtx
4753 expand_builtin_va_copy (tree exp)
4755 tree dst, src, t;
4756 location_t loc = EXPR_LOCATION (exp);
4758 dst = CALL_EXPR_ARG (exp, 0);
4759 src = CALL_EXPR_ARG (exp, 1);
4761 dst = stabilize_va_list_loc (loc, dst, 1);
4762 src = stabilize_va_list_loc (loc, src, 0);
4764 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4766 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4768 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4769 TREE_SIDE_EFFECTS (t) = 1;
4770 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4772 else
4774 rtx dstb, srcb, size;
4776 /* Evaluate to pointers. */
4777 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4778 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4779 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4780 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4782 dstb = convert_memory_address (Pmode, dstb);
4783 srcb = convert_memory_address (Pmode, srcb);
4785 /* "Dereference" to BLKmode memories. */
4786 dstb = gen_rtx_MEM (BLKmode, dstb);
4787 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4788 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4789 srcb = gen_rtx_MEM (BLKmode, srcb);
4790 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4791 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4793 /* Copy. */
4794 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4797 return const0_rtx;
4800 /* Expand a call to one of the builtin functions __builtin_frame_address or
4801 __builtin_return_address. */
4803 static rtx
4804 expand_builtin_frame_address (tree fndecl, tree exp)
4806 /* The argument must be a nonnegative integer constant.
4807 It counts the number of frames to scan up the stack.
4808 The value is either the frame pointer value or the return
4809 address saved in that frame. */
4810 if (call_expr_nargs (exp) == 0)
4811 /* Warning about missing arg was already issued. */
4812 return const0_rtx;
4813 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4815 error ("invalid argument to %qD", fndecl);
4816 return const0_rtx;
4818 else
4820 /* Number of frames to scan up the stack. */
4821 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4823 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4825 /* Some ports cannot access arbitrary stack frames. */
4826 if (tem == NULL)
4828 warning (0, "unsupported argument to %qD", fndecl);
4829 return const0_rtx;
4832 if (count)
4834 /* Warn since no effort is made to ensure that any frame
4835 beyond the current one exists or can be safely reached. */
4836 warning (OPT_Wframe_address, "calling %qD with "
4837 "a nonzero argument is unsafe", fndecl);
4840 /* For __builtin_frame_address, return what we've got. */
4841 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4842 return tem;
4844 if (!REG_P (tem)
4845 && ! CONSTANT_P (tem))
4846 tem = copy_addr_to_reg (tem);
4847 return tem;
4851 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4852 failed and the caller should emit a normal call. */
4854 static rtx
4855 expand_builtin_alloca (tree exp)
4857 rtx op0;
4858 rtx result;
4859 unsigned int align;
4860 tree fndecl = get_callee_fndecl (exp);
4861 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
4862 == BUILT_IN_ALLOCA_WITH_ALIGN);
4863 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4864 bool valid_arglist
4865 = (alloca_with_align
4866 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4867 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4869 if (!valid_arglist)
4870 return NULL_RTX;
4872 if ((alloca_with_align && !warn_vla_limit)
4873 || (!alloca_with_align && !warn_alloca_limit))
4875 /* -Walloca-larger-than and -Wvla-larger-than settings override
4876 the more general -Walloc-size-larger-than so unless either of
4877 the former options is specified check the alloca arguments for
4878 overflow. */
4879 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4880 int idx[] = { 0, -1 };
4881 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4884 /* Compute the argument. */
4885 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4887 /* Compute the alignment. */
4888 align = (alloca_with_align
4889 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4890 : BIGGEST_ALIGNMENT);
4892 /* Allocate the desired space. If the allocation stems from the declaration
4893 of a variable-sized object, it cannot accumulate. */
4894 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
4895 result = convert_memory_address (ptr_mode, result);
4897 return result;
4900 /* Emit a call to __asan_allocas_unpoison call in EXP. Replace second argument
4901 of the call with virtual_stack_dynamic_rtx because in asan pass we emit a
4902 dummy value into second parameter relying on this function to perform the
4903 change. See motivation for this in comment to handle_builtin_stack_restore
4904 function. */
4906 static rtx
4907 expand_asan_emit_allocas_unpoison (tree exp)
4909 tree arg0 = CALL_EXPR_ARG (exp, 0);
4910 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4911 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
4912 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4913 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4914 top, ptr_mode, bot, ptr_mode);
4915 return ret;
4918 /* Expand a call to bswap builtin in EXP.
4919 Return NULL_RTX if a normal call should be emitted rather than expanding the
4920 function in-line. If convenient, the result should be placed in TARGET.
4921 SUBTARGET may be used as the target for computing one of EXP's operands. */
4923 static rtx
4924 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4925 rtx subtarget)
4927 tree arg;
4928 rtx op0;
4930 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4931 return NULL_RTX;
4933 arg = CALL_EXPR_ARG (exp, 0);
4934 op0 = expand_expr (arg,
4935 subtarget && GET_MODE (subtarget) == target_mode
4936 ? subtarget : NULL_RTX,
4937 target_mode, EXPAND_NORMAL);
4938 if (GET_MODE (op0) != target_mode)
4939 op0 = convert_to_mode (target_mode, op0, 1);
4941 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4943 gcc_assert (target);
4945 return convert_to_mode (target_mode, target, 1);
4948 /* Expand a call to a unary builtin in EXP.
4949 Return NULL_RTX if a normal call should be emitted rather than expanding the
4950 function in-line. If convenient, the result should be placed in TARGET.
4951 SUBTARGET may be used as the target for computing one of EXP's operands. */
4953 static rtx
4954 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4955 rtx subtarget, optab op_optab)
4957 rtx op0;
4959 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4960 return NULL_RTX;
4962 /* Compute the argument. */
4963 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4964 (subtarget
4965 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4966 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4967 VOIDmode, EXPAND_NORMAL);
4968 /* Compute op, into TARGET if possible.
4969 Set TARGET to wherever the result comes back. */
4970 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4971 op_optab, op0, target, op_optab != clrsb_optab);
4972 gcc_assert (target);
4974 return convert_to_mode (target_mode, target, 0);
4977 /* Expand a call to __builtin_expect. We just return our argument
4978 as the builtin_expect semantic should've been already executed by
4979 tree branch prediction pass. */
4981 static rtx
4982 expand_builtin_expect (tree exp, rtx target)
4984 tree arg;
4986 if (call_expr_nargs (exp) < 2)
4987 return const0_rtx;
4988 arg = CALL_EXPR_ARG (exp, 0);
4990 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4991 /* When guessing was done, the hints should be already stripped away. */
4992 gcc_assert (!flag_guess_branch_prob
4993 || optimize == 0 || seen_error ());
4994 return target;
4997 /* Expand a call to __builtin_assume_aligned. We just return our first
4998 argument as the builtin_assume_aligned semantic should've been already
4999 executed by CCP. */
5001 static rtx
5002 expand_builtin_assume_aligned (tree exp, rtx target)
5004 if (call_expr_nargs (exp) < 2)
5005 return const0_rtx;
5006 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5007 EXPAND_NORMAL);
5008 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5009 && (call_expr_nargs (exp) < 3
5010 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5011 return target;
5014 void
5015 expand_builtin_trap (void)
5017 if (targetm.have_trap ())
5019 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5020 /* For trap insns when not accumulating outgoing args force
5021 REG_ARGS_SIZE note to prevent crossjumping of calls with
5022 different args sizes. */
5023 if (!ACCUMULATE_OUTGOING_ARGS)
5024 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5026 else
5028 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5029 tree call_expr = build_call_expr (fn, 0);
5030 expand_call (call_expr, NULL_RTX, false);
5033 emit_barrier ();
5036 /* Expand a call to __builtin_unreachable. We do nothing except emit
5037 a barrier saying that control flow will not pass here.
5039 It is the responsibility of the program being compiled to ensure
5040 that control flow does never reach __builtin_unreachable. */
5041 static void
5042 expand_builtin_unreachable (void)
5044 emit_barrier ();
5047 /* Expand EXP, a call to fabs, fabsf or fabsl.
5048 Return NULL_RTX if a normal call should be emitted rather than expanding
5049 the function inline. If convenient, the result should be placed
5050 in TARGET. SUBTARGET may be used as the target for computing
5051 the operand. */
5053 static rtx
5054 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5056 machine_mode mode;
5057 tree arg;
5058 rtx op0;
5060 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5061 return NULL_RTX;
5063 arg = CALL_EXPR_ARG (exp, 0);
5064 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5065 mode = TYPE_MODE (TREE_TYPE (arg));
5066 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5067 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5070 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5071 Return NULL is a normal call should be emitted rather than expanding the
5072 function inline. If convenient, the result should be placed in TARGET.
5073 SUBTARGET may be used as the target for computing the operand. */
5075 static rtx
5076 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5078 rtx op0, op1;
5079 tree arg;
5081 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5082 return NULL_RTX;
5084 arg = CALL_EXPR_ARG (exp, 0);
5085 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5087 arg = CALL_EXPR_ARG (exp, 1);
5088 op1 = expand_normal (arg);
5090 return expand_copysign (op0, op1, target);
5093 /* Expand a call to __builtin___clear_cache. */
5095 static rtx
5096 expand_builtin___clear_cache (tree exp)
5098 if (!targetm.code_for_clear_cache)
5100 #ifdef CLEAR_INSN_CACHE
5101 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5102 does something. Just do the default expansion to a call to
5103 __clear_cache(). */
5104 return NULL_RTX;
5105 #else
5106 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5107 does nothing. There is no need to call it. Do nothing. */
5108 return const0_rtx;
5109 #endif /* CLEAR_INSN_CACHE */
5112 /* We have a "clear_cache" insn, and it will handle everything. */
5113 tree begin, end;
5114 rtx begin_rtx, end_rtx;
5116 /* We must not expand to a library call. If we did, any
5117 fallback library function in libgcc that might contain a call to
5118 __builtin___clear_cache() would recurse infinitely. */
5119 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5121 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5122 return const0_rtx;
5125 if (targetm.have_clear_cache ())
5127 struct expand_operand ops[2];
5129 begin = CALL_EXPR_ARG (exp, 0);
5130 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5132 end = CALL_EXPR_ARG (exp, 1);
5133 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5135 create_address_operand (&ops[0], begin_rtx);
5136 create_address_operand (&ops[1], end_rtx);
5137 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5138 return const0_rtx;
5140 return const0_rtx;
5143 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5145 static rtx
5146 round_trampoline_addr (rtx tramp)
5148 rtx temp, addend, mask;
5150 /* If we don't need too much alignment, we'll have been guaranteed
5151 proper alignment by get_trampoline_type. */
5152 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5153 return tramp;
5155 /* Round address up to desired boundary. */
5156 temp = gen_reg_rtx (Pmode);
5157 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5158 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5160 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5161 temp, 0, OPTAB_LIB_WIDEN);
5162 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5163 temp, 0, OPTAB_LIB_WIDEN);
5165 return tramp;
5168 static rtx
5169 expand_builtin_init_trampoline (tree exp, bool onstack)
5171 tree t_tramp, t_func, t_chain;
5172 rtx m_tramp, r_tramp, r_chain, tmp;
5174 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5175 POINTER_TYPE, VOID_TYPE))
5176 return NULL_RTX;
5178 t_tramp = CALL_EXPR_ARG (exp, 0);
5179 t_func = CALL_EXPR_ARG (exp, 1);
5180 t_chain = CALL_EXPR_ARG (exp, 2);
5182 r_tramp = expand_normal (t_tramp);
5183 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5184 MEM_NOTRAP_P (m_tramp) = 1;
5186 /* If ONSTACK, the TRAMP argument should be the address of a field
5187 within the local function's FRAME decl. Either way, let's see if
5188 we can fill in the MEM_ATTRs for this memory. */
5189 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5190 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5192 /* Creator of a heap trampoline is responsible for making sure the
5193 address is aligned to at least STACK_BOUNDARY. Normally malloc
5194 will ensure this anyhow. */
5195 tmp = round_trampoline_addr (r_tramp);
5196 if (tmp != r_tramp)
5198 m_tramp = change_address (m_tramp, BLKmode, tmp);
5199 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5200 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5203 /* The FUNC argument should be the address of the nested function.
5204 Extract the actual function decl to pass to the hook. */
5205 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5206 t_func = TREE_OPERAND (t_func, 0);
5207 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5209 r_chain = expand_normal (t_chain);
5211 /* Generate insns to initialize the trampoline. */
5212 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5214 if (onstack)
5216 trampolines_created = 1;
5218 if (targetm.calls.custom_function_descriptors != 0)
5219 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5220 "trampoline generated for nested function %qD", t_func);
5223 return const0_rtx;
5226 static rtx
5227 expand_builtin_adjust_trampoline (tree exp)
5229 rtx tramp;
5231 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5232 return NULL_RTX;
5234 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5235 tramp = round_trampoline_addr (tramp);
5236 if (targetm.calls.trampoline_adjust_address)
5237 tramp = targetm.calls.trampoline_adjust_address (tramp);
5239 return tramp;
5242 /* Expand a call to the builtin descriptor initialization routine.
5243 A descriptor is made up of a couple of pointers to the static
5244 chain and the code entry in this order. */
5246 static rtx
5247 expand_builtin_init_descriptor (tree exp)
5249 tree t_descr, t_func, t_chain;
5250 rtx m_descr, r_descr, r_func, r_chain;
5252 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5253 VOID_TYPE))
5254 return NULL_RTX;
5256 t_descr = CALL_EXPR_ARG (exp, 0);
5257 t_func = CALL_EXPR_ARG (exp, 1);
5258 t_chain = CALL_EXPR_ARG (exp, 2);
5260 r_descr = expand_normal (t_descr);
5261 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5262 MEM_NOTRAP_P (m_descr) = 1;
5264 r_func = expand_normal (t_func);
5265 r_chain = expand_normal (t_chain);
5267 /* Generate insns to initialize the descriptor. */
5268 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5269 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5270 POINTER_SIZE / BITS_PER_UNIT), r_func);
5272 return const0_rtx;
5275 /* Expand a call to the builtin descriptor adjustment routine. */
5277 static rtx
5278 expand_builtin_adjust_descriptor (tree exp)
5280 rtx tramp;
5282 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5283 return NULL_RTX;
5285 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5287 /* Unalign the descriptor to allow runtime identification. */
5288 tramp = plus_constant (ptr_mode, tramp,
5289 targetm.calls.custom_function_descriptors);
5291 return force_operand (tramp, NULL_RTX);
5294 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5295 function. The function first checks whether the back end provides
5296 an insn to implement signbit for the respective mode. If not, it
5297 checks whether the floating point format of the value is such that
5298 the sign bit can be extracted. If that is not the case, error out.
5299 EXP is the expression that is a call to the builtin function; if
5300 convenient, the result should be placed in TARGET. */
5301 static rtx
5302 expand_builtin_signbit (tree exp, rtx target)
5304 const struct real_format *fmt;
5305 scalar_float_mode fmode;
5306 scalar_int_mode rmode, imode;
5307 tree arg;
5308 int word, bitpos;
5309 enum insn_code icode;
5310 rtx temp;
5311 location_t loc = EXPR_LOCATION (exp);
5313 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5314 return NULL_RTX;
5316 arg = CALL_EXPR_ARG (exp, 0);
5317 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5318 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5319 fmt = REAL_MODE_FORMAT (fmode);
5321 arg = builtin_save_expr (arg);
5323 /* Expand the argument yielding a RTX expression. */
5324 temp = expand_normal (arg);
5326 /* Check if the back end provides an insn that handles signbit for the
5327 argument's mode. */
5328 icode = optab_handler (signbit_optab, fmode);
5329 if (icode != CODE_FOR_nothing)
5331 rtx_insn *last = get_last_insn ();
5332 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5333 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5334 return target;
5335 delete_insns_since (last);
5338 /* For floating point formats without a sign bit, implement signbit
5339 as "ARG < 0.0". */
5340 bitpos = fmt->signbit_ro;
5341 if (bitpos < 0)
5343 /* But we can't do this if the format supports signed zero. */
5344 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5346 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5347 build_real (TREE_TYPE (arg), dconst0));
5348 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5351 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5353 imode = int_mode_for_mode (fmode).require ();
5354 temp = gen_lowpart (imode, temp);
5356 else
5358 imode = word_mode;
5359 /* Handle targets with different FP word orders. */
5360 if (FLOAT_WORDS_BIG_ENDIAN)
5361 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5362 else
5363 word = bitpos / BITS_PER_WORD;
5364 temp = operand_subword_force (temp, word, fmode);
5365 bitpos = bitpos % BITS_PER_WORD;
5368 /* Force the intermediate word_mode (or narrower) result into a
5369 register. This avoids attempting to create paradoxical SUBREGs
5370 of floating point modes below. */
5371 temp = force_reg (imode, temp);
5373 /* If the bitpos is within the "result mode" lowpart, the operation
5374 can be implement with a single bitwise AND. Otherwise, we need
5375 a right shift and an AND. */
5377 if (bitpos < GET_MODE_BITSIZE (rmode))
5379 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5381 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5382 temp = gen_lowpart (rmode, temp);
5383 temp = expand_binop (rmode, and_optab, temp,
5384 immed_wide_int_const (mask, rmode),
5385 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5387 else
5389 /* Perform a logical right shift to place the signbit in the least
5390 significant bit, then truncate the result to the desired mode
5391 and mask just this bit. */
5392 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5393 temp = gen_lowpart (rmode, temp);
5394 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5395 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5398 return temp;
5401 /* Expand fork or exec calls. TARGET is the desired target of the
5402 call. EXP is the call. FN is the
5403 identificator of the actual function. IGNORE is nonzero if the
5404 value is to be ignored. */
5406 static rtx
5407 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5409 tree id, decl;
5410 tree call;
5412 /* If we are not profiling, just call the function. */
5413 if (!profile_arc_flag)
5414 return NULL_RTX;
5416 /* Otherwise call the wrapper. This should be equivalent for the rest of
5417 compiler, so the code does not diverge, and the wrapper may run the
5418 code necessary for keeping the profiling sane. */
5420 switch (DECL_FUNCTION_CODE (fn))
5422 case BUILT_IN_FORK:
5423 id = get_identifier ("__gcov_fork");
5424 break;
5426 case BUILT_IN_EXECL:
5427 id = get_identifier ("__gcov_execl");
5428 break;
5430 case BUILT_IN_EXECV:
5431 id = get_identifier ("__gcov_execv");
5432 break;
5434 case BUILT_IN_EXECLP:
5435 id = get_identifier ("__gcov_execlp");
5436 break;
5438 case BUILT_IN_EXECLE:
5439 id = get_identifier ("__gcov_execle");
5440 break;
5442 case BUILT_IN_EXECVP:
5443 id = get_identifier ("__gcov_execvp");
5444 break;
5446 case BUILT_IN_EXECVE:
5447 id = get_identifier ("__gcov_execve");
5448 break;
5450 default:
5451 gcc_unreachable ();
5454 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5455 FUNCTION_DECL, id, TREE_TYPE (fn));
5456 DECL_EXTERNAL (decl) = 1;
5457 TREE_PUBLIC (decl) = 1;
5458 DECL_ARTIFICIAL (decl) = 1;
5459 TREE_NOTHROW (decl) = 1;
5460 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5461 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5462 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5463 return expand_call (call, target, ignore);
5468 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5469 the pointer in these functions is void*, the tree optimizers may remove
5470 casts. The mode computed in expand_builtin isn't reliable either, due
5471 to __sync_bool_compare_and_swap.
5473 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5474 group of builtins. This gives us log2 of the mode size. */
5476 static inline machine_mode
5477 get_builtin_sync_mode (int fcode_diff)
5479 /* The size is not negotiable, so ask not to get BLKmode in return
5480 if the target indicates that a smaller size would be better. */
5481 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5484 /* Expand the memory expression LOC and return the appropriate memory operand
5485 for the builtin_sync operations. */
5487 static rtx
5488 get_builtin_sync_mem (tree loc, machine_mode mode)
5490 rtx addr, mem;
5492 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5493 addr = convert_memory_address (Pmode, addr);
5495 /* Note that we explicitly do not want any alias information for this
5496 memory, so that we kill all other live memories. Otherwise we don't
5497 satisfy the full barrier semantics of the intrinsic. */
5498 mem = validize_mem (gen_rtx_MEM (mode, addr));
5500 /* The alignment needs to be at least according to that of the mode. */
5501 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5502 get_pointer_alignment (loc)));
5503 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5504 MEM_VOLATILE_P (mem) = 1;
5506 return mem;
5509 /* Make sure an argument is in the right mode.
5510 EXP is the tree argument.
5511 MODE is the mode it should be in. */
5513 static rtx
5514 expand_expr_force_mode (tree exp, machine_mode mode)
5516 rtx val;
5517 machine_mode old_mode;
5519 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5520 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5521 of CONST_INTs, where we know the old_mode only from the call argument. */
5523 old_mode = GET_MODE (val);
5524 if (old_mode == VOIDmode)
5525 old_mode = TYPE_MODE (TREE_TYPE (exp));
5526 val = convert_modes (mode, old_mode, val, 1);
5527 return val;
5531 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5532 EXP is the CALL_EXPR. CODE is the rtx code
5533 that corresponds to the arithmetic or logical operation from the name;
5534 an exception here is that NOT actually means NAND. TARGET is an optional
5535 place for us to store the results; AFTER is true if this is the
5536 fetch_and_xxx form. */
5538 static rtx
5539 expand_builtin_sync_operation (machine_mode mode, tree exp,
5540 enum rtx_code code, bool after,
5541 rtx target)
5543 rtx val, mem;
5544 location_t loc = EXPR_LOCATION (exp);
5546 if (code == NOT && warn_sync_nand)
5548 tree fndecl = get_callee_fndecl (exp);
5549 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5551 static bool warned_f_a_n, warned_n_a_f;
5553 switch (fcode)
5555 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5556 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5557 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5558 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5559 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5560 if (warned_f_a_n)
5561 break;
5563 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5564 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5565 warned_f_a_n = true;
5566 break;
5568 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5569 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5570 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5571 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5572 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5573 if (warned_n_a_f)
5574 break;
5576 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5577 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5578 warned_n_a_f = true;
5579 break;
5581 default:
5582 gcc_unreachable ();
5586 /* Expand the operands. */
5587 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5588 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5590 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5591 after);
5594 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5595 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5596 true if this is the boolean form. TARGET is a place for us to store the
5597 results; this is NOT optional if IS_BOOL is true. */
5599 static rtx
5600 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5601 bool is_bool, rtx target)
5603 rtx old_val, new_val, mem;
5604 rtx *pbool, *poval;
5606 /* Expand the operands. */
5607 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5608 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5609 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5611 pbool = poval = NULL;
5612 if (target != const0_rtx)
5614 if (is_bool)
5615 pbool = &target;
5616 else
5617 poval = &target;
5619 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5620 false, MEMMODEL_SYNC_SEQ_CST,
5621 MEMMODEL_SYNC_SEQ_CST))
5622 return NULL_RTX;
5624 return target;
5627 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5628 general form is actually an atomic exchange, and some targets only
5629 support a reduced form with the second argument being a constant 1.
5630 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5631 the results. */
5633 static rtx
5634 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5635 rtx target)
5637 rtx val, mem;
5639 /* Expand the operands. */
5640 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5641 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5643 return expand_sync_lock_test_and_set (target, mem, val);
5646 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5648 static void
5649 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5651 rtx mem;
5653 /* Expand the operands. */
5654 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5656 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5659 /* Given an integer representing an ``enum memmodel'', verify its
5660 correctness and return the memory model enum. */
5662 static enum memmodel
5663 get_memmodel (tree exp)
5665 rtx op;
5666 unsigned HOST_WIDE_INT val;
5667 source_location loc
5668 = expansion_point_location_if_in_system_header (input_location);
5670 /* If the parameter is not a constant, it's a run time value so we'll just
5671 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5672 if (TREE_CODE (exp) != INTEGER_CST)
5673 return MEMMODEL_SEQ_CST;
5675 op = expand_normal (exp);
5677 val = INTVAL (op);
5678 if (targetm.memmodel_check)
5679 val = targetm.memmodel_check (val);
5680 else if (val & ~MEMMODEL_MASK)
5682 warning_at (loc, OPT_Winvalid_memory_model,
5683 "unknown architecture specifier in memory model to builtin");
5684 return MEMMODEL_SEQ_CST;
5687 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5688 if (memmodel_base (val) >= MEMMODEL_LAST)
5690 warning_at (loc, OPT_Winvalid_memory_model,
5691 "invalid memory model argument to builtin");
5692 return MEMMODEL_SEQ_CST;
5695 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5696 be conservative and promote consume to acquire. */
5697 if (val == MEMMODEL_CONSUME)
5698 val = MEMMODEL_ACQUIRE;
5700 return (enum memmodel) val;
5703 /* Expand the __atomic_exchange intrinsic:
5704 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5705 EXP is the CALL_EXPR.
5706 TARGET is an optional place for us to store the results. */
5708 static rtx
5709 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5711 rtx val, mem;
5712 enum memmodel model;
5714 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5716 if (!flag_inline_atomics)
5717 return NULL_RTX;
5719 /* Expand the operands. */
5720 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5721 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5723 return expand_atomic_exchange (target, mem, val, model);
5726 /* Expand the __atomic_compare_exchange intrinsic:
5727 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5728 TYPE desired, BOOL weak,
5729 enum memmodel success,
5730 enum memmodel failure)
5731 EXP is the CALL_EXPR.
5732 TARGET is an optional place for us to store the results. */
5734 static rtx
5735 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5736 rtx target)
5738 rtx expect, desired, mem, oldval;
5739 rtx_code_label *label;
5740 enum memmodel success, failure;
5741 tree weak;
5742 bool is_weak;
5743 source_location loc
5744 = expansion_point_location_if_in_system_header (input_location);
5746 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5747 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5749 if (failure > success)
5751 warning_at (loc, OPT_Winvalid_memory_model,
5752 "failure memory model cannot be stronger than success "
5753 "memory model for %<__atomic_compare_exchange%>");
5754 success = MEMMODEL_SEQ_CST;
5757 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5759 warning_at (loc, OPT_Winvalid_memory_model,
5760 "invalid failure memory model for "
5761 "%<__atomic_compare_exchange%>");
5762 failure = MEMMODEL_SEQ_CST;
5763 success = MEMMODEL_SEQ_CST;
5767 if (!flag_inline_atomics)
5768 return NULL_RTX;
5770 /* Expand the operands. */
5771 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5773 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5774 expect = convert_memory_address (Pmode, expect);
5775 expect = gen_rtx_MEM (mode, expect);
5776 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5778 weak = CALL_EXPR_ARG (exp, 3);
5779 is_weak = false;
5780 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5781 is_weak = true;
5783 if (target == const0_rtx)
5784 target = NULL;
5786 /* Lest the rtl backend create a race condition with an imporoper store
5787 to memory, always create a new pseudo for OLDVAL. */
5788 oldval = NULL;
5790 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5791 is_weak, success, failure))
5792 return NULL_RTX;
5794 /* Conditionally store back to EXPECT, lest we create a race condition
5795 with an improper store to memory. */
5796 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5797 the normal case where EXPECT is totally private, i.e. a register. At
5798 which point the store can be unconditional. */
5799 label = gen_label_rtx ();
5800 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5801 GET_MODE (target), 1, label);
5802 emit_move_insn (expect, oldval);
5803 emit_label (label);
5805 return target;
5808 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5809 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5810 call. The weak parameter must be dropped to match the expected parameter
5811 list and the expected argument changed from value to pointer to memory
5812 slot. */
5814 static void
5815 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5817 unsigned int z;
5818 vec<tree, va_gc> *vec;
5820 vec_alloc (vec, 5);
5821 vec->quick_push (gimple_call_arg (call, 0));
5822 tree expected = gimple_call_arg (call, 1);
5823 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5824 TREE_TYPE (expected));
5825 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5826 if (expd != x)
5827 emit_move_insn (x, expd);
5828 tree v = make_tree (TREE_TYPE (expected), x);
5829 vec->quick_push (build1 (ADDR_EXPR,
5830 build_pointer_type (TREE_TYPE (expected)), v));
5831 vec->quick_push (gimple_call_arg (call, 2));
5832 /* Skip the boolean weak parameter. */
5833 for (z = 4; z < 6; z++)
5834 vec->quick_push (gimple_call_arg (call, z));
5835 built_in_function fncode
5836 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5837 + exact_log2 (GET_MODE_SIZE (mode)));
5838 tree fndecl = builtin_decl_explicit (fncode);
5839 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5840 fndecl);
5841 tree exp = build_call_vec (boolean_type_node, fn, vec);
5842 tree lhs = gimple_call_lhs (call);
5843 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5844 if (lhs)
5846 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5847 if (GET_MODE (boolret) != mode)
5848 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5849 x = force_reg (mode, x);
5850 write_complex_part (target, boolret, true);
5851 write_complex_part (target, x, false);
5855 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5857 void
5858 expand_ifn_atomic_compare_exchange (gcall *call)
5860 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5861 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5862 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5863 rtx expect, desired, mem, oldval, boolret;
5864 enum memmodel success, failure;
5865 tree lhs;
5866 bool is_weak;
5867 source_location loc
5868 = expansion_point_location_if_in_system_header (gimple_location (call));
5870 success = get_memmodel (gimple_call_arg (call, 4));
5871 failure = get_memmodel (gimple_call_arg (call, 5));
5873 if (failure > success)
5875 warning_at (loc, OPT_Winvalid_memory_model,
5876 "failure memory model cannot be stronger than success "
5877 "memory model for %<__atomic_compare_exchange%>");
5878 success = MEMMODEL_SEQ_CST;
5881 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5883 warning_at (loc, OPT_Winvalid_memory_model,
5884 "invalid failure memory model for "
5885 "%<__atomic_compare_exchange%>");
5886 failure = MEMMODEL_SEQ_CST;
5887 success = MEMMODEL_SEQ_CST;
5890 if (!flag_inline_atomics)
5892 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5893 return;
5896 /* Expand the operands. */
5897 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5899 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5900 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5902 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5904 boolret = NULL;
5905 oldval = NULL;
5907 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5908 is_weak, success, failure))
5910 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5911 return;
5914 lhs = gimple_call_lhs (call);
5915 if (lhs)
5917 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5918 if (GET_MODE (boolret) != mode)
5919 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5920 write_complex_part (target, boolret, true);
5921 write_complex_part (target, oldval, false);
5925 /* Expand the __atomic_load intrinsic:
5926 TYPE __atomic_load (TYPE *object, enum memmodel)
5927 EXP is the CALL_EXPR.
5928 TARGET is an optional place for us to store the results. */
5930 static rtx
5931 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5933 rtx mem;
5934 enum memmodel model;
5936 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5937 if (is_mm_release (model) || is_mm_acq_rel (model))
5939 source_location loc
5940 = expansion_point_location_if_in_system_header (input_location);
5941 warning_at (loc, OPT_Winvalid_memory_model,
5942 "invalid memory model for %<__atomic_load%>");
5943 model = MEMMODEL_SEQ_CST;
5946 if (!flag_inline_atomics)
5947 return NULL_RTX;
5949 /* Expand the operand. */
5950 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5952 return expand_atomic_load (target, mem, model);
5956 /* Expand the __atomic_store intrinsic:
5957 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5958 EXP is the CALL_EXPR.
5959 TARGET is an optional place for us to store the results. */
5961 static rtx
5962 expand_builtin_atomic_store (machine_mode mode, tree exp)
5964 rtx mem, val;
5965 enum memmodel model;
5967 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5968 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5969 || is_mm_release (model)))
5971 source_location loc
5972 = expansion_point_location_if_in_system_header (input_location);
5973 warning_at (loc, OPT_Winvalid_memory_model,
5974 "invalid memory model for %<__atomic_store%>");
5975 model = MEMMODEL_SEQ_CST;
5978 if (!flag_inline_atomics)
5979 return NULL_RTX;
5981 /* Expand the operands. */
5982 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5983 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5985 return expand_atomic_store (mem, val, model, false);
5988 /* Expand the __atomic_fetch_XXX intrinsic:
5989 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5990 EXP is the CALL_EXPR.
5991 TARGET is an optional place for us to store the results.
5992 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5993 FETCH_AFTER is true if returning the result of the operation.
5994 FETCH_AFTER is false if returning the value before the operation.
5995 IGNORE is true if the result is not used.
5996 EXT_CALL is the correct builtin for an external call if this cannot be
5997 resolved to an instruction sequence. */
5999 static rtx
6000 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6001 enum rtx_code code, bool fetch_after,
6002 bool ignore, enum built_in_function ext_call)
6004 rtx val, mem, ret;
6005 enum memmodel model;
6006 tree fndecl;
6007 tree addr;
6009 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6011 /* Expand the operands. */
6012 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6013 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6015 /* Only try generating instructions if inlining is turned on. */
6016 if (flag_inline_atomics)
6018 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6019 if (ret)
6020 return ret;
6023 /* Return if a different routine isn't needed for the library call. */
6024 if (ext_call == BUILT_IN_NONE)
6025 return NULL_RTX;
6027 /* Change the call to the specified function. */
6028 fndecl = get_callee_fndecl (exp);
6029 addr = CALL_EXPR_FN (exp);
6030 STRIP_NOPS (addr);
6032 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6033 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6035 /* If we will emit code after the call, the call can not be a tail call.
6036 If it is emitted as a tail call, a barrier is emitted after it, and
6037 then all trailing code is removed. */
6038 if (!ignore)
6039 CALL_EXPR_TAILCALL (exp) = 0;
6041 /* Expand the call here so we can emit trailing code. */
6042 ret = expand_call (exp, target, ignore);
6044 /* Replace the original function just in case it matters. */
6045 TREE_OPERAND (addr, 0) = fndecl;
6047 /* Then issue the arithmetic correction to return the right result. */
6048 if (!ignore)
6050 if (code == NOT)
6052 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6053 OPTAB_LIB_WIDEN);
6054 ret = expand_simple_unop (mode, NOT, ret, target, true);
6056 else
6057 ret = expand_simple_binop (mode, code, ret, val, target, true,
6058 OPTAB_LIB_WIDEN);
6060 return ret;
6063 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6065 void
6066 expand_ifn_atomic_bit_test_and (gcall *call)
6068 tree ptr = gimple_call_arg (call, 0);
6069 tree bit = gimple_call_arg (call, 1);
6070 tree flag = gimple_call_arg (call, 2);
6071 tree lhs = gimple_call_lhs (call);
6072 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6073 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6074 enum rtx_code code;
6075 optab optab;
6076 struct expand_operand ops[5];
6078 gcc_assert (flag_inline_atomics);
6080 if (gimple_call_num_args (call) == 4)
6081 model = get_memmodel (gimple_call_arg (call, 3));
6083 rtx mem = get_builtin_sync_mem (ptr, mode);
6084 rtx val = expand_expr_force_mode (bit, mode);
6086 switch (gimple_call_internal_fn (call))
6088 case IFN_ATOMIC_BIT_TEST_AND_SET:
6089 code = IOR;
6090 optab = atomic_bit_test_and_set_optab;
6091 break;
6092 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6093 code = XOR;
6094 optab = atomic_bit_test_and_complement_optab;
6095 break;
6096 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6097 code = AND;
6098 optab = atomic_bit_test_and_reset_optab;
6099 break;
6100 default:
6101 gcc_unreachable ();
6104 if (lhs == NULL_TREE)
6106 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6107 val, NULL_RTX, true, OPTAB_DIRECT);
6108 if (code == AND)
6109 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6110 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6111 return;
6114 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6115 enum insn_code icode = direct_optab_handler (optab, mode);
6116 gcc_assert (icode != CODE_FOR_nothing);
6117 create_output_operand (&ops[0], target, mode);
6118 create_fixed_operand (&ops[1], mem);
6119 create_convert_operand_to (&ops[2], val, mode, true);
6120 create_integer_operand (&ops[3], model);
6121 create_integer_operand (&ops[4], integer_onep (flag));
6122 if (maybe_expand_insn (icode, 5, ops))
6123 return;
6125 rtx bitval = val;
6126 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6127 val, NULL_RTX, true, OPTAB_DIRECT);
6128 rtx maskval = val;
6129 if (code == AND)
6130 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6131 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6132 code, model, false);
6133 if (integer_onep (flag))
6135 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6136 NULL_RTX, true, OPTAB_DIRECT);
6137 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6138 true, OPTAB_DIRECT);
6140 else
6141 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6142 OPTAB_DIRECT);
6143 if (result != target)
6144 emit_move_insn (target, result);
6147 /* Expand an atomic clear operation.
6148 void _atomic_clear (BOOL *obj, enum memmodel)
6149 EXP is the call expression. */
6151 static rtx
6152 expand_builtin_atomic_clear (tree exp)
6154 machine_mode mode;
6155 rtx mem, ret;
6156 enum memmodel model;
6158 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6159 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6160 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6162 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6164 source_location loc
6165 = expansion_point_location_if_in_system_header (input_location);
6166 warning_at (loc, OPT_Winvalid_memory_model,
6167 "invalid memory model for %<__atomic_store%>");
6168 model = MEMMODEL_SEQ_CST;
6171 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6172 Failing that, a store is issued by __atomic_store. The only way this can
6173 fail is if the bool type is larger than a word size. Unlikely, but
6174 handle it anyway for completeness. Assume a single threaded model since
6175 there is no atomic support in this case, and no barriers are required. */
6176 ret = expand_atomic_store (mem, const0_rtx, model, true);
6177 if (!ret)
6178 emit_move_insn (mem, const0_rtx);
6179 return const0_rtx;
6182 /* Expand an atomic test_and_set operation.
6183 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6184 EXP is the call expression. */
6186 static rtx
6187 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6189 rtx mem;
6190 enum memmodel model;
6191 machine_mode mode;
6193 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6194 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6195 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6197 return expand_atomic_test_and_set (target, mem, model);
6201 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6202 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6204 static tree
6205 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6207 int size;
6208 machine_mode mode;
6209 unsigned int mode_align, type_align;
6211 if (TREE_CODE (arg0) != INTEGER_CST)
6212 return NULL_TREE;
6214 /* We need a corresponding integer mode for the access to be lock-free. */
6215 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6216 if (!int_mode_for_size (size, 0).exists (&mode))
6217 return boolean_false_node;
6219 mode_align = GET_MODE_ALIGNMENT (mode);
6221 if (TREE_CODE (arg1) == INTEGER_CST)
6223 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6225 /* Either this argument is null, or it's a fake pointer encoding
6226 the alignment of the object. */
6227 val = least_bit_hwi (val);
6228 val *= BITS_PER_UNIT;
6230 if (val == 0 || mode_align < val)
6231 type_align = mode_align;
6232 else
6233 type_align = val;
6235 else
6237 tree ttype = TREE_TYPE (arg1);
6239 /* This function is usually invoked and folded immediately by the front
6240 end before anything else has a chance to look at it. The pointer
6241 parameter at this point is usually cast to a void *, so check for that
6242 and look past the cast. */
6243 if (CONVERT_EXPR_P (arg1)
6244 && POINTER_TYPE_P (ttype)
6245 && VOID_TYPE_P (TREE_TYPE (ttype))
6246 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6247 arg1 = TREE_OPERAND (arg1, 0);
6249 ttype = TREE_TYPE (arg1);
6250 gcc_assert (POINTER_TYPE_P (ttype));
6252 /* Get the underlying type of the object. */
6253 ttype = TREE_TYPE (ttype);
6254 type_align = TYPE_ALIGN (ttype);
6257 /* If the object has smaller alignment, the lock free routines cannot
6258 be used. */
6259 if (type_align < mode_align)
6260 return boolean_false_node;
6262 /* Check if a compare_and_swap pattern exists for the mode which represents
6263 the required size. The pattern is not allowed to fail, so the existence
6264 of the pattern indicates support is present. Also require that an
6265 atomic load exists for the required size. */
6266 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6267 return boolean_true_node;
6268 else
6269 return boolean_false_node;
6272 /* Return true if the parameters to call EXP represent an object which will
6273 always generate lock free instructions. The first argument represents the
6274 size of the object, and the second parameter is a pointer to the object
6275 itself. If NULL is passed for the object, then the result is based on
6276 typical alignment for an object of the specified size. Otherwise return
6277 false. */
6279 static rtx
6280 expand_builtin_atomic_always_lock_free (tree exp)
6282 tree size;
6283 tree arg0 = CALL_EXPR_ARG (exp, 0);
6284 tree arg1 = CALL_EXPR_ARG (exp, 1);
6286 if (TREE_CODE (arg0) != INTEGER_CST)
6288 error ("non-constant argument 1 to __atomic_always_lock_free");
6289 return const0_rtx;
6292 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6293 if (size == boolean_true_node)
6294 return const1_rtx;
6295 return const0_rtx;
6298 /* Return a one or zero if it can be determined that object ARG1 of size ARG
6299 is lock free on this architecture. */
6301 static tree
6302 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6304 if (!flag_inline_atomics)
6305 return NULL_TREE;
6307 /* If it isn't always lock free, don't generate a result. */
6308 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6309 return boolean_true_node;
6311 return NULL_TREE;
6314 /* Return true if the parameters to call EXP represent an object which will
6315 always generate lock free instructions. The first argument represents the
6316 size of the object, and the second parameter is a pointer to the object
6317 itself. If NULL is passed for the object, then the result is based on
6318 typical alignment for an object of the specified size. Otherwise return
6319 NULL*/
6321 static rtx
6322 expand_builtin_atomic_is_lock_free (tree exp)
6324 tree size;
6325 tree arg0 = CALL_EXPR_ARG (exp, 0);
6326 tree arg1 = CALL_EXPR_ARG (exp, 1);
6328 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6330 error ("non-integer argument 1 to __atomic_is_lock_free");
6331 return NULL_RTX;
6334 if (!flag_inline_atomics)
6335 return NULL_RTX;
6337 /* If the value is known at compile time, return the RTX for it. */
6338 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6339 if (size == boolean_true_node)
6340 return const1_rtx;
6342 return NULL_RTX;
6345 /* Expand the __atomic_thread_fence intrinsic:
6346 void __atomic_thread_fence (enum memmodel)
6347 EXP is the CALL_EXPR. */
6349 static void
6350 expand_builtin_atomic_thread_fence (tree exp)
6352 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6353 expand_mem_thread_fence (model);
6356 /* Expand the __atomic_signal_fence intrinsic:
6357 void __atomic_signal_fence (enum memmodel)
6358 EXP is the CALL_EXPR. */
6360 static void
6361 expand_builtin_atomic_signal_fence (tree exp)
6363 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6364 expand_mem_signal_fence (model);
6367 /* Expand the __sync_synchronize intrinsic. */
6369 static void
6370 expand_builtin_sync_synchronize (void)
6372 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6375 static rtx
6376 expand_builtin_thread_pointer (tree exp, rtx target)
6378 enum insn_code icode;
6379 if (!validate_arglist (exp, VOID_TYPE))
6380 return const0_rtx;
6381 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6382 if (icode != CODE_FOR_nothing)
6384 struct expand_operand op;
6385 /* If the target is not sutitable then create a new target. */
6386 if (target == NULL_RTX
6387 || !REG_P (target)
6388 || GET_MODE (target) != Pmode)
6389 target = gen_reg_rtx (Pmode);
6390 create_output_operand (&op, target, Pmode);
6391 expand_insn (icode, 1, &op);
6392 return target;
6394 error ("__builtin_thread_pointer is not supported on this target");
6395 return const0_rtx;
6398 static void
6399 expand_builtin_set_thread_pointer (tree exp)
6401 enum insn_code icode;
6402 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6403 return;
6404 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6405 if (icode != CODE_FOR_nothing)
6407 struct expand_operand op;
6408 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6409 Pmode, EXPAND_NORMAL);
6410 create_input_operand (&op, val, Pmode);
6411 expand_insn (icode, 1, &op);
6412 return;
6414 error ("__builtin_set_thread_pointer is not supported on this target");
6418 /* Emit code to restore the current value of stack. */
6420 static void
6421 expand_stack_restore (tree var)
6423 rtx_insn *prev;
6424 rtx sa = expand_normal (var);
6426 sa = convert_memory_address (Pmode, sa);
6428 prev = get_last_insn ();
6429 emit_stack_restore (SAVE_BLOCK, sa);
6431 record_new_stack_level ();
6433 fixup_args_size_notes (prev, get_last_insn (), 0);
6436 /* Emit code to save the current value of stack. */
6438 static rtx
6439 expand_stack_save (void)
6441 rtx ret = NULL_RTX;
6443 emit_stack_save (SAVE_BLOCK, &ret);
6444 return ret;
6448 /* Expand an expression EXP that calls a built-in function,
6449 with result going to TARGET if that's convenient
6450 (and in mode MODE if that's convenient).
6451 SUBTARGET may be used as the target for computing one of EXP's operands.
6452 IGNORE is nonzero if the value is to be ignored. */
6455 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6456 int ignore)
6458 tree fndecl = get_callee_fndecl (exp);
6459 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6460 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6461 int flags;
6463 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6464 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6466 /* When ASan is enabled, we don't want to expand some memory/string
6467 builtins and rely on libsanitizer's hooks. This allows us to avoid
6468 redundant checks and be sure, that possible overflow will be detected
6469 by ASan. */
6471 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6472 return expand_call (exp, target, ignore);
6474 /* When not optimizing, generate calls to library functions for a certain
6475 set of builtins. */
6476 if (!optimize
6477 && !called_as_built_in (fndecl)
6478 && fcode != BUILT_IN_FORK
6479 && fcode != BUILT_IN_EXECL
6480 && fcode != BUILT_IN_EXECV
6481 && fcode != BUILT_IN_EXECLP
6482 && fcode != BUILT_IN_EXECLE
6483 && fcode != BUILT_IN_EXECVP
6484 && fcode != BUILT_IN_EXECVE
6485 && fcode != BUILT_IN_ALLOCA
6486 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
6487 && fcode != BUILT_IN_FREE
6488 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6489 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6490 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6491 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6492 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6493 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6494 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6495 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6496 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6497 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6498 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6499 && fcode != BUILT_IN_CHKP_BNDRET)
6500 return expand_call (exp, target, ignore);
6502 /* The built-in function expanders test for target == const0_rtx
6503 to determine whether the function's result will be ignored. */
6504 if (ignore)
6505 target = const0_rtx;
6507 /* If the result of a pure or const built-in function is ignored, and
6508 none of its arguments are volatile, we can avoid expanding the
6509 built-in call and just evaluate the arguments for side-effects. */
6510 if (target == const0_rtx
6511 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6512 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6514 bool volatilep = false;
6515 tree arg;
6516 call_expr_arg_iterator iter;
6518 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6519 if (TREE_THIS_VOLATILE (arg))
6521 volatilep = true;
6522 break;
6525 if (! volatilep)
6527 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6528 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6529 return const0_rtx;
6533 /* expand_builtin_with_bounds is supposed to be used for
6534 instrumented builtin calls. */
6535 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6537 switch (fcode)
6539 CASE_FLT_FN (BUILT_IN_FABS):
6540 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6541 case BUILT_IN_FABSD32:
6542 case BUILT_IN_FABSD64:
6543 case BUILT_IN_FABSD128:
6544 target = expand_builtin_fabs (exp, target, subtarget);
6545 if (target)
6546 return target;
6547 break;
6549 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6550 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6551 target = expand_builtin_copysign (exp, target, subtarget);
6552 if (target)
6553 return target;
6554 break;
6556 /* Just do a normal library call if we were unable to fold
6557 the values. */
6558 CASE_FLT_FN (BUILT_IN_CABS):
6559 break;
6561 CASE_FLT_FN (BUILT_IN_FMA):
6562 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6563 if (target)
6564 return target;
6565 break;
6567 CASE_FLT_FN (BUILT_IN_ILOGB):
6568 if (! flag_unsafe_math_optimizations)
6569 break;
6570 gcc_fallthrough ();
6571 CASE_FLT_FN (BUILT_IN_ISINF):
6572 CASE_FLT_FN (BUILT_IN_FINITE):
6573 case BUILT_IN_ISFINITE:
6574 case BUILT_IN_ISNORMAL:
6575 target = expand_builtin_interclass_mathfn (exp, target);
6576 if (target)
6577 return target;
6578 break;
6580 CASE_FLT_FN (BUILT_IN_ICEIL):
6581 CASE_FLT_FN (BUILT_IN_LCEIL):
6582 CASE_FLT_FN (BUILT_IN_LLCEIL):
6583 CASE_FLT_FN (BUILT_IN_LFLOOR):
6584 CASE_FLT_FN (BUILT_IN_IFLOOR):
6585 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6586 target = expand_builtin_int_roundingfn (exp, target);
6587 if (target)
6588 return target;
6589 break;
6591 CASE_FLT_FN (BUILT_IN_IRINT):
6592 CASE_FLT_FN (BUILT_IN_LRINT):
6593 CASE_FLT_FN (BUILT_IN_LLRINT):
6594 CASE_FLT_FN (BUILT_IN_IROUND):
6595 CASE_FLT_FN (BUILT_IN_LROUND):
6596 CASE_FLT_FN (BUILT_IN_LLROUND):
6597 target = expand_builtin_int_roundingfn_2 (exp, target);
6598 if (target)
6599 return target;
6600 break;
6602 CASE_FLT_FN (BUILT_IN_POWI):
6603 target = expand_builtin_powi (exp, target);
6604 if (target)
6605 return target;
6606 break;
6608 CASE_FLT_FN (BUILT_IN_CEXPI):
6609 target = expand_builtin_cexpi (exp, target);
6610 gcc_assert (target);
6611 return target;
6613 CASE_FLT_FN (BUILT_IN_SIN):
6614 CASE_FLT_FN (BUILT_IN_COS):
6615 if (! flag_unsafe_math_optimizations)
6616 break;
6617 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6618 if (target)
6619 return target;
6620 break;
6622 CASE_FLT_FN (BUILT_IN_SINCOS):
6623 if (! flag_unsafe_math_optimizations)
6624 break;
6625 target = expand_builtin_sincos (exp);
6626 if (target)
6627 return target;
6628 break;
6630 case BUILT_IN_APPLY_ARGS:
6631 return expand_builtin_apply_args ();
6633 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6634 FUNCTION with a copy of the parameters described by
6635 ARGUMENTS, and ARGSIZE. It returns a block of memory
6636 allocated on the stack into which is stored all the registers
6637 that might possibly be used for returning the result of a
6638 function. ARGUMENTS is the value returned by
6639 __builtin_apply_args. ARGSIZE is the number of bytes of
6640 arguments that must be copied. ??? How should this value be
6641 computed? We'll also need a safe worst case value for varargs
6642 functions. */
6643 case BUILT_IN_APPLY:
6644 if (!validate_arglist (exp, POINTER_TYPE,
6645 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6646 && !validate_arglist (exp, REFERENCE_TYPE,
6647 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6648 return const0_rtx;
6649 else
6651 rtx ops[3];
6653 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6654 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6655 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6657 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6660 /* __builtin_return (RESULT) causes the function to return the
6661 value described by RESULT. RESULT is address of the block of
6662 memory returned by __builtin_apply. */
6663 case BUILT_IN_RETURN:
6664 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6665 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6666 return const0_rtx;
6668 case BUILT_IN_SAVEREGS:
6669 return expand_builtin_saveregs ();
6671 case BUILT_IN_VA_ARG_PACK:
6672 /* All valid uses of __builtin_va_arg_pack () are removed during
6673 inlining. */
6674 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6675 return const0_rtx;
6677 case BUILT_IN_VA_ARG_PACK_LEN:
6678 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6679 inlining. */
6680 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6681 return const0_rtx;
6683 /* Return the address of the first anonymous stack arg. */
6684 case BUILT_IN_NEXT_ARG:
6685 if (fold_builtin_next_arg (exp, false))
6686 return const0_rtx;
6687 return expand_builtin_next_arg ();
6689 case BUILT_IN_CLEAR_CACHE:
6690 target = expand_builtin___clear_cache (exp);
6691 if (target)
6692 return target;
6693 break;
6695 case BUILT_IN_CLASSIFY_TYPE:
6696 return expand_builtin_classify_type (exp);
6698 case BUILT_IN_CONSTANT_P:
6699 return const0_rtx;
6701 case BUILT_IN_FRAME_ADDRESS:
6702 case BUILT_IN_RETURN_ADDRESS:
6703 return expand_builtin_frame_address (fndecl, exp);
6705 /* Returns the address of the area where the structure is returned.
6706 0 otherwise. */
6707 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6708 if (call_expr_nargs (exp) != 0
6709 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6710 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6711 return const0_rtx;
6712 else
6713 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6715 case BUILT_IN_ALLOCA:
6716 case BUILT_IN_ALLOCA_WITH_ALIGN:
6717 target = expand_builtin_alloca (exp);
6718 if (target)
6719 return target;
6720 break;
6722 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6723 return expand_asan_emit_allocas_unpoison (exp);
6725 case BUILT_IN_STACK_SAVE:
6726 return expand_stack_save ();
6728 case BUILT_IN_STACK_RESTORE:
6729 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6730 return const0_rtx;
6732 case BUILT_IN_BSWAP16:
6733 case BUILT_IN_BSWAP32:
6734 case BUILT_IN_BSWAP64:
6735 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6736 if (target)
6737 return target;
6738 break;
6740 CASE_INT_FN (BUILT_IN_FFS):
6741 target = expand_builtin_unop (target_mode, exp, target,
6742 subtarget, ffs_optab);
6743 if (target)
6744 return target;
6745 break;
6747 CASE_INT_FN (BUILT_IN_CLZ):
6748 target = expand_builtin_unop (target_mode, exp, target,
6749 subtarget, clz_optab);
6750 if (target)
6751 return target;
6752 break;
6754 CASE_INT_FN (BUILT_IN_CTZ):
6755 target = expand_builtin_unop (target_mode, exp, target,
6756 subtarget, ctz_optab);
6757 if (target)
6758 return target;
6759 break;
6761 CASE_INT_FN (BUILT_IN_CLRSB):
6762 target = expand_builtin_unop (target_mode, exp, target,
6763 subtarget, clrsb_optab);
6764 if (target)
6765 return target;
6766 break;
6768 CASE_INT_FN (BUILT_IN_POPCOUNT):
6769 target = expand_builtin_unop (target_mode, exp, target,
6770 subtarget, popcount_optab);
6771 if (target)
6772 return target;
6773 break;
6775 CASE_INT_FN (BUILT_IN_PARITY):
6776 target = expand_builtin_unop (target_mode, exp, target,
6777 subtarget, parity_optab);
6778 if (target)
6779 return target;
6780 break;
6782 case BUILT_IN_STRLEN:
6783 target = expand_builtin_strlen (exp, target, target_mode);
6784 if (target)
6785 return target;
6786 break;
6788 case BUILT_IN_STRCAT:
6789 target = expand_builtin_strcat (exp, target);
6790 if (target)
6791 return target;
6792 break;
6794 case BUILT_IN_STRCPY:
6795 target = expand_builtin_strcpy (exp, target);
6796 if (target)
6797 return target;
6798 break;
6800 case BUILT_IN_STRNCAT:
6801 target = expand_builtin_strncat (exp, target);
6802 if (target)
6803 return target;
6804 break;
6806 case BUILT_IN_STRNCPY:
6807 target = expand_builtin_strncpy (exp, target);
6808 if (target)
6809 return target;
6810 break;
6812 case BUILT_IN_STPCPY:
6813 target = expand_builtin_stpcpy (exp, target, mode);
6814 if (target)
6815 return target;
6816 break;
6818 case BUILT_IN_STPNCPY:
6819 target = expand_builtin_stpncpy (exp, target);
6820 if (target)
6821 return target;
6822 break;
6824 case BUILT_IN_MEMCHR:
6825 target = expand_builtin_memchr (exp, target);
6826 if (target)
6827 return target;
6828 break;
6830 case BUILT_IN_MEMCPY:
6831 target = expand_builtin_memcpy (exp, target);
6832 if (target)
6833 return target;
6834 break;
6836 case BUILT_IN_MEMMOVE:
6837 target = expand_builtin_memmove (exp, target);
6838 if (target)
6839 return target;
6840 break;
6842 case BUILT_IN_MEMPCPY:
6843 target = expand_builtin_mempcpy (exp, target);
6844 if (target)
6845 return target;
6846 break;
6848 case BUILT_IN_MEMSET:
6849 target = expand_builtin_memset (exp, target, mode);
6850 if (target)
6851 return target;
6852 break;
6854 case BUILT_IN_BZERO:
6855 target = expand_builtin_bzero (exp);
6856 if (target)
6857 return target;
6858 break;
6860 case BUILT_IN_STRCMP:
6861 target = expand_builtin_strcmp (exp, target);
6862 if (target)
6863 return target;
6864 break;
6866 case BUILT_IN_STRNCMP:
6867 target = expand_builtin_strncmp (exp, target, mode);
6868 if (target)
6869 return target;
6870 break;
6872 case BUILT_IN_BCMP:
6873 case BUILT_IN_MEMCMP:
6874 case BUILT_IN_MEMCMP_EQ:
6875 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
6876 if (target)
6877 return target;
6878 if (fcode == BUILT_IN_MEMCMP_EQ)
6880 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6881 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6883 break;
6885 case BUILT_IN_SETJMP:
6886 /* This should have been lowered to the builtins below. */
6887 gcc_unreachable ();
6889 case BUILT_IN_SETJMP_SETUP:
6890 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6891 and the receiver label. */
6892 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6894 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6895 VOIDmode, EXPAND_NORMAL);
6896 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6897 rtx_insn *label_r = label_rtx (label);
6899 /* This is copied from the handling of non-local gotos. */
6900 expand_builtin_setjmp_setup (buf_addr, label_r);
6901 nonlocal_goto_handler_labels
6902 = gen_rtx_INSN_LIST (VOIDmode, label_r,
6903 nonlocal_goto_handler_labels);
6904 /* ??? Do not let expand_label treat us as such since we would
6905 not want to be both on the list of non-local labels and on
6906 the list of forced labels. */
6907 FORCED_LABEL (label) = 0;
6908 return const0_rtx;
6910 break;
6912 case BUILT_IN_SETJMP_RECEIVER:
6913 /* __builtin_setjmp_receiver is passed the receiver label. */
6914 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6916 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6917 rtx_insn *label_r = label_rtx (label);
6919 expand_builtin_setjmp_receiver (label_r);
6920 return const0_rtx;
6922 break;
6924 /* __builtin_longjmp is passed a pointer to an array of five words.
6925 It's similar to the C library longjmp function but works with
6926 __builtin_setjmp above. */
6927 case BUILT_IN_LONGJMP:
6928 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6930 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6931 VOIDmode, EXPAND_NORMAL);
6932 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6934 if (value != const1_rtx)
6936 error ("%<__builtin_longjmp%> second argument must be 1");
6937 return const0_rtx;
6940 expand_builtin_longjmp (buf_addr, value);
6941 return const0_rtx;
6943 break;
6945 case BUILT_IN_NONLOCAL_GOTO:
6946 target = expand_builtin_nonlocal_goto (exp);
6947 if (target)
6948 return target;
6949 break;
6951 /* This updates the setjmp buffer that is its argument with the value
6952 of the current stack pointer. */
6953 case BUILT_IN_UPDATE_SETJMP_BUF:
6954 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6956 rtx buf_addr
6957 = expand_normal (CALL_EXPR_ARG (exp, 0));
6959 expand_builtin_update_setjmp_buf (buf_addr);
6960 return const0_rtx;
6962 break;
6964 case BUILT_IN_TRAP:
6965 expand_builtin_trap ();
6966 return const0_rtx;
6968 case BUILT_IN_UNREACHABLE:
6969 expand_builtin_unreachable ();
6970 return const0_rtx;
6972 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6973 case BUILT_IN_SIGNBITD32:
6974 case BUILT_IN_SIGNBITD64:
6975 case BUILT_IN_SIGNBITD128:
6976 target = expand_builtin_signbit (exp, target);
6977 if (target)
6978 return target;
6979 break;
6981 /* Various hooks for the DWARF 2 __throw routine. */
6982 case BUILT_IN_UNWIND_INIT:
6983 expand_builtin_unwind_init ();
6984 return const0_rtx;
6985 case BUILT_IN_DWARF_CFA:
6986 return virtual_cfa_rtx;
6987 #ifdef DWARF2_UNWIND_INFO
6988 case BUILT_IN_DWARF_SP_COLUMN:
6989 return expand_builtin_dwarf_sp_column ();
6990 case BUILT_IN_INIT_DWARF_REG_SIZES:
6991 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6992 return const0_rtx;
6993 #endif
6994 case BUILT_IN_FROB_RETURN_ADDR:
6995 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6996 case BUILT_IN_EXTRACT_RETURN_ADDR:
6997 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6998 case BUILT_IN_EH_RETURN:
6999 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7000 CALL_EXPR_ARG (exp, 1));
7001 return const0_rtx;
7002 case BUILT_IN_EH_RETURN_DATA_REGNO:
7003 return expand_builtin_eh_return_data_regno (exp);
7004 case BUILT_IN_EXTEND_POINTER:
7005 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7006 case BUILT_IN_EH_POINTER:
7007 return expand_builtin_eh_pointer (exp);
7008 case BUILT_IN_EH_FILTER:
7009 return expand_builtin_eh_filter (exp);
7010 case BUILT_IN_EH_COPY_VALUES:
7011 return expand_builtin_eh_copy_values (exp);
7013 case BUILT_IN_VA_START:
7014 return expand_builtin_va_start (exp);
7015 case BUILT_IN_VA_END:
7016 return expand_builtin_va_end (exp);
7017 case BUILT_IN_VA_COPY:
7018 return expand_builtin_va_copy (exp);
7019 case BUILT_IN_EXPECT:
7020 return expand_builtin_expect (exp, target);
7021 case BUILT_IN_ASSUME_ALIGNED:
7022 return expand_builtin_assume_aligned (exp, target);
7023 case BUILT_IN_PREFETCH:
7024 expand_builtin_prefetch (exp);
7025 return const0_rtx;
7027 case BUILT_IN_INIT_TRAMPOLINE:
7028 return expand_builtin_init_trampoline (exp, true);
7029 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7030 return expand_builtin_init_trampoline (exp, false);
7031 case BUILT_IN_ADJUST_TRAMPOLINE:
7032 return expand_builtin_adjust_trampoline (exp);
7034 case BUILT_IN_INIT_DESCRIPTOR:
7035 return expand_builtin_init_descriptor (exp);
7036 case BUILT_IN_ADJUST_DESCRIPTOR:
7037 return expand_builtin_adjust_descriptor (exp);
7039 case BUILT_IN_FORK:
7040 case BUILT_IN_EXECL:
7041 case BUILT_IN_EXECV:
7042 case BUILT_IN_EXECLP:
7043 case BUILT_IN_EXECLE:
7044 case BUILT_IN_EXECVP:
7045 case BUILT_IN_EXECVE:
7046 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7047 if (target)
7048 return target;
7049 break;
7051 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7052 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7053 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7054 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7055 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7056 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7057 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7058 if (target)
7059 return target;
7060 break;
7062 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7063 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7064 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7065 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7066 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7067 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7068 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7069 if (target)
7070 return target;
7071 break;
7073 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7074 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7075 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7076 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7077 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7078 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7079 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7080 if (target)
7081 return target;
7082 break;
7084 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7085 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7086 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7087 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7088 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7089 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7090 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7091 if (target)
7092 return target;
7093 break;
7095 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7096 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7097 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7098 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7099 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7100 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7101 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7102 if (target)
7103 return target;
7104 break;
7106 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7107 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7108 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7109 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7110 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7111 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7112 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7113 if (target)
7114 return target;
7115 break;
7117 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7118 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7119 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7120 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7121 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7122 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7123 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7124 if (target)
7125 return target;
7126 break;
7128 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7129 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7130 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7131 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7132 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7133 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7134 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7135 if (target)
7136 return target;
7137 break;
7139 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7140 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7141 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7142 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7143 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7144 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7145 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7146 if (target)
7147 return target;
7148 break;
7150 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7151 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7152 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7153 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7154 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7155 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7156 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7157 if (target)
7158 return target;
7159 break;
7161 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7162 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7163 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7164 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7165 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7166 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7167 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7168 if (target)
7169 return target;
7170 break;
7172 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7173 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7174 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7175 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7176 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7177 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7178 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7179 if (target)
7180 return target;
7181 break;
7183 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7184 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7185 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7186 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7187 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7188 if (mode == VOIDmode)
7189 mode = TYPE_MODE (boolean_type_node);
7190 if (!target || !register_operand (target, mode))
7191 target = gen_reg_rtx (mode);
7193 mode = get_builtin_sync_mode
7194 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7195 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7196 if (target)
7197 return target;
7198 break;
7200 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7201 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7202 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7203 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7204 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7205 mode = get_builtin_sync_mode
7206 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7207 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7208 if (target)
7209 return target;
7210 break;
7212 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7213 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7214 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7215 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7216 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7217 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7218 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7219 if (target)
7220 return target;
7221 break;
7223 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7224 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7225 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7226 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7227 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7228 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7229 expand_builtin_sync_lock_release (mode, exp);
7230 return const0_rtx;
7232 case BUILT_IN_SYNC_SYNCHRONIZE:
7233 expand_builtin_sync_synchronize ();
7234 return const0_rtx;
7236 case BUILT_IN_ATOMIC_EXCHANGE_1:
7237 case BUILT_IN_ATOMIC_EXCHANGE_2:
7238 case BUILT_IN_ATOMIC_EXCHANGE_4:
7239 case BUILT_IN_ATOMIC_EXCHANGE_8:
7240 case BUILT_IN_ATOMIC_EXCHANGE_16:
7241 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7242 target = expand_builtin_atomic_exchange (mode, exp, target);
7243 if (target)
7244 return target;
7245 break;
7247 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7248 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7249 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7250 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7251 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7253 unsigned int nargs, z;
7254 vec<tree, va_gc> *vec;
7256 mode =
7257 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7258 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7259 if (target)
7260 return target;
7262 /* If this is turned into an external library call, the weak parameter
7263 must be dropped to match the expected parameter list. */
7264 nargs = call_expr_nargs (exp);
7265 vec_alloc (vec, nargs - 1);
7266 for (z = 0; z < 3; z++)
7267 vec->quick_push (CALL_EXPR_ARG (exp, z));
7268 /* Skip the boolean weak parameter. */
7269 for (z = 4; z < 6; z++)
7270 vec->quick_push (CALL_EXPR_ARG (exp, z));
7271 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7272 break;
7275 case BUILT_IN_ATOMIC_LOAD_1:
7276 case BUILT_IN_ATOMIC_LOAD_2:
7277 case BUILT_IN_ATOMIC_LOAD_4:
7278 case BUILT_IN_ATOMIC_LOAD_8:
7279 case BUILT_IN_ATOMIC_LOAD_16:
7280 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7281 target = expand_builtin_atomic_load (mode, exp, target);
7282 if (target)
7283 return target;
7284 break;
7286 case BUILT_IN_ATOMIC_STORE_1:
7287 case BUILT_IN_ATOMIC_STORE_2:
7288 case BUILT_IN_ATOMIC_STORE_4:
7289 case BUILT_IN_ATOMIC_STORE_8:
7290 case BUILT_IN_ATOMIC_STORE_16:
7291 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7292 target = expand_builtin_atomic_store (mode, exp);
7293 if (target)
7294 return const0_rtx;
7295 break;
7297 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7298 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7299 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7300 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7301 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7303 enum built_in_function lib;
7304 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7305 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7306 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7307 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7308 ignore, lib);
7309 if (target)
7310 return target;
7311 break;
7313 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7314 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7315 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7316 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7317 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7319 enum built_in_function lib;
7320 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7321 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7322 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7323 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7324 ignore, lib);
7325 if (target)
7326 return target;
7327 break;
7329 case BUILT_IN_ATOMIC_AND_FETCH_1:
7330 case BUILT_IN_ATOMIC_AND_FETCH_2:
7331 case BUILT_IN_ATOMIC_AND_FETCH_4:
7332 case BUILT_IN_ATOMIC_AND_FETCH_8:
7333 case BUILT_IN_ATOMIC_AND_FETCH_16:
7335 enum built_in_function lib;
7336 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7337 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7338 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7339 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7340 ignore, lib);
7341 if (target)
7342 return target;
7343 break;
7345 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7346 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7347 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7348 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7349 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7351 enum built_in_function lib;
7352 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7353 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7354 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7355 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7356 ignore, lib);
7357 if (target)
7358 return target;
7359 break;
7361 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7362 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7363 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7364 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7365 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7367 enum built_in_function lib;
7368 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7369 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7370 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7371 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7372 ignore, lib);
7373 if (target)
7374 return target;
7375 break;
7377 case BUILT_IN_ATOMIC_OR_FETCH_1:
7378 case BUILT_IN_ATOMIC_OR_FETCH_2:
7379 case BUILT_IN_ATOMIC_OR_FETCH_4:
7380 case BUILT_IN_ATOMIC_OR_FETCH_8:
7381 case BUILT_IN_ATOMIC_OR_FETCH_16:
7383 enum built_in_function lib;
7384 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7385 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7386 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7387 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7388 ignore, lib);
7389 if (target)
7390 return target;
7391 break;
7393 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7394 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7395 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7396 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7397 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7398 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7399 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7400 ignore, BUILT_IN_NONE);
7401 if (target)
7402 return target;
7403 break;
7405 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7406 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7407 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7408 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7409 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7410 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7411 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7412 ignore, BUILT_IN_NONE);
7413 if (target)
7414 return target;
7415 break;
7417 case BUILT_IN_ATOMIC_FETCH_AND_1:
7418 case BUILT_IN_ATOMIC_FETCH_AND_2:
7419 case BUILT_IN_ATOMIC_FETCH_AND_4:
7420 case BUILT_IN_ATOMIC_FETCH_AND_8:
7421 case BUILT_IN_ATOMIC_FETCH_AND_16:
7422 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7423 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7424 ignore, BUILT_IN_NONE);
7425 if (target)
7426 return target;
7427 break;
7429 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7430 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7431 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7432 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7433 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7434 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7435 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7436 ignore, BUILT_IN_NONE);
7437 if (target)
7438 return target;
7439 break;
7441 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7442 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7443 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7444 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7445 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7446 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7447 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7448 ignore, BUILT_IN_NONE);
7449 if (target)
7450 return target;
7451 break;
7453 case BUILT_IN_ATOMIC_FETCH_OR_1:
7454 case BUILT_IN_ATOMIC_FETCH_OR_2:
7455 case BUILT_IN_ATOMIC_FETCH_OR_4:
7456 case BUILT_IN_ATOMIC_FETCH_OR_8:
7457 case BUILT_IN_ATOMIC_FETCH_OR_16:
7458 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7459 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7460 ignore, BUILT_IN_NONE);
7461 if (target)
7462 return target;
7463 break;
7465 case BUILT_IN_ATOMIC_TEST_AND_SET:
7466 return expand_builtin_atomic_test_and_set (exp, target);
7468 case BUILT_IN_ATOMIC_CLEAR:
7469 return expand_builtin_atomic_clear (exp);
7471 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7472 return expand_builtin_atomic_always_lock_free (exp);
7474 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7475 target = expand_builtin_atomic_is_lock_free (exp);
7476 if (target)
7477 return target;
7478 break;
7480 case BUILT_IN_ATOMIC_THREAD_FENCE:
7481 expand_builtin_atomic_thread_fence (exp);
7482 return const0_rtx;
7484 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7485 expand_builtin_atomic_signal_fence (exp);
7486 return const0_rtx;
7488 case BUILT_IN_OBJECT_SIZE:
7489 return expand_builtin_object_size (exp);
7491 case BUILT_IN_MEMCPY_CHK:
7492 case BUILT_IN_MEMPCPY_CHK:
7493 case BUILT_IN_MEMMOVE_CHK:
7494 case BUILT_IN_MEMSET_CHK:
7495 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7496 if (target)
7497 return target;
7498 break;
7500 case BUILT_IN_STRCPY_CHK:
7501 case BUILT_IN_STPCPY_CHK:
7502 case BUILT_IN_STRNCPY_CHK:
7503 case BUILT_IN_STPNCPY_CHK:
7504 case BUILT_IN_STRCAT_CHK:
7505 case BUILT_IN_STRNCAT_CHK:
7506 case BUILT_IN_SNPRINTF_CHK:
7507 case BUILT_IN_VSNPRINTF_CHK:
7508 maybe_emit_chk_warning (exp, fcode);
7509 break;
7511 case BUILT_IN_SPRINTF_CHK:
7512 case BUILT_IN_VSPRINTF_CHK:
7513 maybe_emit_sprintf_chk_warning (exp, fcode);
7514 break;
7516 case BUILT_IN_FREE:
7517 if (warn_free_nonheap_object)
7518 maybe_emit_free_warning (exp);
7519 break;
7521 case BUILT_IN_THREAD_POINTER:
7522 return expand_builtin_thread_pointer (exp, target);
7524 case BUILT_IN_SET_THREAD_POINTER:
7525 expand_builtin_set_thread_pointer (exp);
7526 return const0_rtx;
7528 case BUILT_IN_CILK_DETACH:
7529 expand_builtin_cilk_detach (exp);
7530 return const0_rtx;
7532 case BUILT_IN_CILK_POP_FRAME:
7533 expand_builtin_cilk_pop_frame (exp);
7534 return const0_rtx;
7536 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7537 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7538 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7539 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7540 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7541 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7542 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7543 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7544 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7545 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7546 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7547 /* We allow user CHKP builtins if Pointer Bounds
7548 Checker is off. */
7549 if (!chkp_function_instrumented_p (current_function_decl))
7551 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7552 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7553 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7554 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7555 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7556 return expand_normal (CALL_EXPR_ARG (exp, 0));
7557 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7558 return expand_normal (size_zero_node);
7559 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7560 return expand_normal (size_int (-1));
7561 else
7562 return const0_rtx;
7564 /* FALLTHROUGH */
7566 case BUILT_IN_CHKP_BNDMK:
7567 case BUILT_IN_CHKP_BNDSTX:
7568 case BUILT_IN_CHKP_BNDCL:
7569 case BUILT_IN_CHKP_BNDCU:
7570 case BUILT_IN_CHKP_BNDLDX:
7571 case BUILT_IN_CHKP_BNDRET:
7572 case BUILT_IN_CHKP_INTERSECT:
7573 case BUILT_IN_CHKP_NARROW:
7574 case BUILT_IN_CHKP_EXTRACT_LOWER:
7575 case BUILT_IN_CHKP_EXTRACT_UPPER:
7576 /* Software implementation of Pointer Bounds Checker is NYI.
7577 Target support is required. */
7578 error ("Your target platform does not support -fcheck-pointer-bounds");
7579 break;
7581 case BUILT_IN_ACC_ON_DEVICE:
7582 /* Do library call, if we failed to expand the builtin when
7583 folding. */
7584 break;
7586 default: /* just do library call, if unknown builtin */
7587 break;
7590 /* The switch statement above can drop through to cause the function
7591 to be called normally. */
7592 return expand_call (exp, target, ignore);
7595 /* Similar to expand_builtin but is used for instrumented calls. */
7598 expand_builtin_with_bounds (tree exp, rtx target,
7599 rtx subtarget ATTRIBUTE_UNUSED,
7600 machine_mode mode, int ignore)
7602 tree fndecl = get_callee_fndecl (exp);
7603 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7605 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7607 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7608 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7610 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7611 && fcode < END_CHKP_BUILTINS);
7613 switch (fcode)
7615 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7616 target = expand_builtin_memcpy_with_bounds (exp, target);
7617 if (target)
7618 return target;
7619 break;
7621 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7622 target = expand_builtin_mempcpy_with_bounds (exp, target);
7623 if (target)
7624 return target;
7625 break;
7627 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7628 target = expand_builtin_memset_with_bounds (exp, target, mode);
7629 if (target)
7630 return target;
7631 break;
7633 default:
7634 break;
7637 /* The switch statement above can drop through to cause the function
7638 to be called normally. */
7639 return expand_call (exp, target, ignore);
7642 /* Determine whether a tree node represents a call to a built-in
7643 function. If the tree T is a call to a built-in function with
7644 the right number of arguments of the appropriate types, return
7645 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7646 Otherwise the return value is END_BUILTINS. */
7648 enum built_in_function
7649 builtin_mathfn_code (const_tree t)
7651 const_tree fndecl, arg, parmlist;
7652 const_tree argtype, parmtype;
7653 const_call_expr_arg_iterator iter;
7655 if (TREE_CODE (t) != CALL_EXPR
7656 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
7657 return END_BUILTINS;
7659 fndecl = get_callee_fndecl (t);
7660 if (fndecl == NULL_TREE
7661 || TREE_CODE (fndecl) != FUNCTION_DECL
7662 || ! DECL_BUILT_IN (fndecl)
7663 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7664 return END_BUILTINS;
7666 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7667 init_const_call_expr_arg_iterator (t, &iter);
7668 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7670 /* If a function doesn't take a variable number of arguments,
7671 the last element in the list will have type `void'. */
7672 parmtype = TREE_VALUE (parmlist);
7673 if (VOID_TYPE_P (parmtype))
7675 if (more_const_call_expr_args_p (&iter))
7676 return END_BUILTINS;
7677 return DECL_FUNCTION_CODE (fndecl);
7680 if (! more_const_call_expr_args_p (&iter))
7681 return END_BUILTINS;
7683 arg = next_const_call_expr_arg (&iter);
7684 argtype = TREE_TYPE (arg);
7686 if (SCALAR_FLOAT_TYPE_P (parmtype))
7688 if (! SCALAR_FLOAT_TYPE_P (argtype))
7689 return END_BUILTINS;
7691 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7693 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7694 return END_BUILTINS;
7696 else if (POINTER_TYPE_P (parmtype))
7698 if (! POINTER_TYPE_P (argtype))
7699 return END_BUILTINS;
7701 else if (INTEGRAL_TYPE_P (parmtype))
7703 if (! INTEGRAL_TYPE_P (argtype))
7704 return END_BUILTINS;
7706 else
7707 return END_BUILTINS;
7710 /* Variable-length argument list. */
7711 return DECL_FUNCTION_CODE (fndecl);
7714 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7715 evaluate to a constant. */
7717 static tree
7718 fold_builtin_constant_p (tree arg)
7720 /* We return 1 for a numeric type that's known to be a constant
7721 value at compile-time or for an aggregate type that's a
7722 literal constant. */
7723 STRIP_NOPS (arg);
7725 /* If we know this is a constant, emit the constant of one. */
7726 if (CONSTANT_CLASS_P (arg)
7727 || (TREE_CODE (arg) == CONSTRUCTOR
7728 && TREE_CONSTANT (arg)))
7729 return integer_one_node;
7730 if (TREE_CODE (arg) == ADDR_EXPR)
7732 tree op = TREE_OPERAND (arg, 0);
7733 if (TREE_CODE (op) == STRING_CST
7734 || (TREE_CODE (op) == ARRAY_REF
7735 && integer_zerop (TREE_OPERAND (op, 1))
7736 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7737 return integer_one_node;
7740 /* If this expression has side effects, show we don't know it to be a
7741 constant. Likewise if it's a pointer or aggregate type since in
7742 those case we only want literals, since those are only optimized
7743 when generating RTL, not later.
7744 And finally, if we are compiling an initializer, not code, we
7745 need to return a definite result now; there's not going to be any
7746 more optimization done. */
7747 if (TREE_SIDE_EFFECTS (arg)
7748 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7749 || POINTER_TYPE_P (TREE_TYPE (arg))
7750 || cfun == 0
7751 || folding_initializer
7752 || force_folding_builtin_constant_p)
7753 return integer_zero_node;
7755 return NULL_TREE;
7758 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7759 return it as a truthvalue. */
7761 static tree
7762 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7763 tree predictor)
7765 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7767 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7768 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7769 ret_type = TREE_TYPE (TREE_TYPE (fn));
7770 pred_type = TREE_VALUE (arg_types);
7771 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7773 pred = fold_convert_loc (loc, pred_type, pred);
7774 expected = fold_convert_loc (loc, expected_type, expected);
7775 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7776 predictor);
7778 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7779 build_int_cst (ret_type, 0));
7782 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7783 NULL_TREE if no simplification is possible. */
7785 tree
7786 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7788 tree inner, fndecl, inner_arg0;
7789 enum tree_code code;
7791 /* Distribute the expected value over short-circuiting operators.
7792 See through the cast from truthvalue_type_node to long. */
7793 inner_arg0 = arg0;
7794 while (CONVERT_EXPR_P (inner_arg0)
7795 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7796 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7797 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7799 /* If this is a builtin_expect within a builtin_expect keep the
7800 inner one. See through a comparison against a constant. It
7801 might have been added to create a thruthvalue. */
7802 inner = inner_arg0;
7804 if (COMPARISON_CLASS_P (inner)
7805 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7806 inner = TREE_OPERAND (inner, 0);
7808 if (TREE_CODE (inner) == CALL_EXPR
7809 && (fndecl = get_callee_fndecl (inner))
7810 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7811 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7812 return arg0;
7814 inner = inner_arg0;
7815 code = TREE_CODE (inner);
7816 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7818 tree op0 = TREE_OPERAND (inner, 0);
7819 tree op1 = TREE_OPERAND (inner, 1);
7821 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7822 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7823 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7825 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7828 /* If the argument isn't invariant then there's nothing else we can do. */
7829 if (!TREE_CONSTANT (inner_arg0))
7830 return NULL_TREE;
7832 /* If we expect that a comparison against the argument will fold to
7833 a constant return the constant. In practice, this means a true
7834 constant or the address of a non-weak symbol. */
7835 inner = inner_arg0;
7836 STRIP_NOPS (inner);
7837 if (TREE_CODE (inner) == ADDR_EXPR)
7841 inner = TREE_OPERAND (inner, 0);
7843 while (TREE_CODE (inner) == COMPONENT_REF
7844 || TREE_CODE (inner) == ARRAY_REF);
7845 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
7846 return NULL_TREE;
7849 /* Otherwise, ARG0 already has the proper type for the return value. */
7850 return arg0;
7853 /* Fold a call to __builtin_classify_type with argument ARG. */
7855 static tree
7856 fold_builtin_classify_type (tree arg)
7858 if (arg == 0)
7859 return build_int_cst (integer_type_node, no_type_class);
7861 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7864 /* Fold a call to __builtin_strlen with argument ARG. */
7866 static tree
7867 fold_builtin_strlen (location_t loc, tree type, tree arg)
7869 if (!validate_arg (arg, POINTER_TYPE))
7870 return NULL_TREE;
7871 else
7873 tree len = c_strlen (arg, 0);
7875 if (len)
7876 return fold_convert_loc (loc, type, len);
7878 return NULL_TREE;
7882 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7884 static tree
7885 fold_builtin_inf (location_t loc, tree type, int warn)
7887 REAL_VALUE_TYPE real;
7889 /* __builtin_inff is intended to be usable to define INFINITY on all
7890 targets. If an infinity is not available, INFINITY expands "to a
7891 positive constant of type float that overflows at translation
7892 time", footnote "In this case, using INFINITY will violate the
7893 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7894 Thus we pedwarn to ensure this constraint violation is
7895 diagnosed. */
7896 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7897 pedwarn (loc, 0, "target format does not support infinity");
7899 real_inf (&real);
7900 return build_real (type, real);
7903 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7904 NULL_TREE if no simplification can be made. */
7906 static tree
7907 fold_builtin_sincos (location_t loc,
7908 tree arg0, tree arg1, tree arg2)
7910 tree type;
7911 tree fndecl, call = NULL_TREE;
7913 if (!validate_arg (arg0, REAL_TYPE)
7914 || !validate_arg (arg1, POINTER_TYPE)
7915 || !validate_arg (arg2, POINTER_TYPE))
7916 return NULL_TREE;
7918 type = TREE_TYPE (arg0);
7920 /* Calculate the result when the argument is a constant. */
7921 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7922 if (fn == END_BUILTINS)
7923 return NULL_TREE;
7925 /* Canonicalize sincos to cexpi. */
7926 if (TREE_CODE (arg0) == REAL_CST)
7928 tree complex_type = build_complex_type (type);
7929 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7931 if (!call)
7933 if (!targetm.libc_has_function (function_c99_math_complex)
7934 || !builtin_decl_implicit_p (fn))
7935 return NULL_TREE;
7936 fndecl = builtin_decl_explicit (fn);
7937 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7938 call = builtin_save_expr (call);
7941 return build2 (COMPOUND_EXPR, void_type_node,
7942 build2 (MODIFY_EXPR, void_type_node,
7943 build_fold_indirect_ref_loc (loc, arg1),
7944 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7945 build2 (MODIFY_EXPR, void_type_node,
7946 build_fold_indirect_ref_loc (loc, arg2),
7947 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7950 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7951 Return NULL_TREE if no simplification can be made. */
7953 static tree
7954 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7956 if (!validate_arg (arg1, POINTER_TYPE)
7957 || !validate_arg (arg2, POINTER_TYPE)
7958 || !validate_arg (len, INTEGER_TYPE))
7959 return NULL_TREE;
7961 /* If the LEN parameter is zero, return zero. */
7962 if (integer_zerop (len))
7963 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7964 arg1, arg2);
7966 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7967 if (operand_equal_p (arg1, arg2, 0))
7968 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7970 /* If len parameter is one, return an expression corresponding to
7971 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
7972 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7974 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7975 tree cst_uchar_ptr_node
7976 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7978 tree ind1
7979 = fold_convert_loc (loc, integer_type_node,
7980 build1 (INDIRECT_REF, cst_uchar_node,
7981 fold_convert_loc (loc,
7982 cst_uchar_ptr_node,
7983 arg1)));
7984 tree ind2
7985 = fold_convert_loc (loc, integer_type_node,
7986 build1 (INDIRECT_REF, cst_uchar_node,
7987 fold_convert_loc (loc,
7988 cst_uchar_ptr_node,
7989 arg2)));
7990 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7993 return NULL_TREE;
7996 /* Fold a call to builtin isascii with argument ARG. */
7998 static tree
7999 fold_builtin_isascii (location_t loc, tree arg)
8001 if (!validate_arg (arg, INTEGER_TYPE))
8002 return NULL_TREE;
8003 else
8005 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8006 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8007 build_int_cst (integer_type_node,
8008 ~ (unsigned HOST_WIDE_INT) 0x7f));
8009 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8010 arg, integer_zero_node);
8014 /* Fold a call to builtin toascii with argument ARG. */
8016 static tree
8017 fold_builtin_toascii (location_t loc, tree arg)
8019 if (!validate_arg (arg, INTEGER_TYPE))
8020 return NULL_TREE;
8022 /* Transform toascii(c) -> (c & 0x7f). */
8023 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8024 build_int_cst (integer_type_node, 0x7f));
8027 /* Fold a call to builtin isdigit with argument ARG. */
8029 static tree
8030 fold_builtin_isdigit (location_t loc, tree arg)
8032 if (!validate_arg (arg, INTEGER_TYPE))
8033 return NULL_TREE;
8034 else
8036 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8037 /* According to the C standard, isdigit is unaffected by locale.
8038 However, it definitely is affected by the target character set. */
8039 unsigned HOST_WIDE_INT target_digit0
8040 = lang_hooks.to_target_charset ('0');
8042 if (target_digit0 == 0)
8043 return NULL_TREE;
8045 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8046 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8047 build_int_cst (unsigned_type_node, target_digit0));
8048 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8049 build_int_cst (unsigned_type_node, 9));
8053 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8055 static tree
8056 fold_builtin_fabs (location_t loc, tree arg, tree type)
8058 if (!validate_arg (arg, REAL_TYPE))
8059 return NULL_TREE;
8061 arg = fold_convert_loc (loc, type, arg);
8062 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8065 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8067 static tree
8068 fold_builtin_abs (location_t loc, tree arg, tree type)
8070 if (!validate_arg (arg, INTEGER_TYPE))
8071 return NULL_TREE;
8073 arg = fold_convert_loc (loc, type, arg);
8074 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8077 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8079 static tree
8080 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8082 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8083 if (validate_arg (arg0, REAL_TYPE)
8084 && validate_arg (arg1, REAL_TYPE)
8085 && validate_arg (arg2, REAL_TYPE)
8086 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8087 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8089 return NULL_TREE;
8092 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8094 static tree
8095 fold_builtin_carg (location_t loc, tree arg, tree type)
8097 if (validate_arg (arg, COMPLEX_TYPE)
8098 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8100 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8102 if (atan2_fn)
8104 tree new_arg = builtin_save_expr (arg);
8105 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8106 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8107 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8111 return NULL_TREE;
8114 /* Fold a call to builtin frexp, we can assume the base is 2. */
8116 static tree
8117 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8119 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8120 return NULL_TREE;
8122 STRIP_NOPS (arg0);
8124 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8125 return NULL_TREE;
8127 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8129 /* Proceed if a valid pointer type was passed in. */
8130 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8132 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8133 tree frac, exp;
8135 switch (value->cl)
8137 case rvc_zero:
8138 /* For +-0, return (*exp = 0, +-0). */
8139 exp = integer_zero_node;
8140 frac = arg0;
8141 break;
8142 case rvc_nan:
8143 case rvc_inf:
8144 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8145 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8146 case rvc_normal:
8148 /* Since the frexp function always expects base 2, and in
8149 GCC normalized significands are already in the range
8150 [0.5, 1.0), we have exactly what frexp wants. */
8151 REAL_VALUE_TYPE frac_rvt = *value;
8152 SET_REAL_EXP (&frac_rvt, 0);
8153 frac = build_real (rettype, frac_rvt);
8154 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8156 break;
8157 default:
8158 gcc_unreachable ();
8161 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8162 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8163 TREE_SIDE_EFFECTS (arg1) = 1;
8164 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8167 return NULL_TREE;
8170 /* Fold a call to builtin modf. */
8172 static tree
8173 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8175 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8176 return NULL_TREE;
8178 STRIP_NOPS (arg0);
8180 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8181 return NULL_TREE;
8183 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8185 /* Proceed if a valid pointer type was passed in. */
8186 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8188 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8189 REAL_VALUE_TYPE trunc, frac;
8191 switch (value->cl)
8193 case rvc_nan:
8194 case rvc_zero:
8195 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8196 trunc = frac = *value;
8197 break;
8198 case rvc_inf:
8199 /* For +-Inf, return (*arg1 = arg0, +-0). */
8200 frac = dconst0;
8201 frac.sign = value->sign;
8202 trunc = *value;
8203 break;
8204 case rvc_normal:
8205 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8206 real_trunc (&trunc, VOIDmode, value);
8207 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8208 /* If the original number was negative and already
8209 integral, then the fractional part is -0.0. */
8210 if (value->sign && frac.cl == rvc_zero)
8211 frac.sign = value->sign;
8212 break;
8215 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8216 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8217 build_real (rettype, trunc));
8218 TREE_SIDE_EFFECTS (arg1) = 1;
8219 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8220 build_real (rettype, frac));
8223 return NULL_TREE;
8226 /* Given a location LOC, an interclass builtin function decl FNDECL
8227 and its single argument ARG, return an folded expression computing
8228 the same, or NULL_TREE if we either couldn't or didn't want to fold
8229 (the latter happen if there's an RTL instruction available). */
8231 static tree
8232 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8234 machine_mode mode;
8236 if (!validate_arg (arg, REAL_TYPE))
8237 return NULL_TREE;
8239 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8240 return NULL_TREE;
8242 mode = TYPE_MODE (TREE_TYPE (arg));
8244 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8246 /* If there is no optab, try generic code. */
8247 switch (DECL_FUNCTION_CODE (fndecl))
8249 tree result;
8251 CASE_FLT_FN (BUILT_IN_ISINF):
8253 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8254 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8255 tree type = TREE_TYPE (arg);
8256 REAL_VALUE_TYPE r;
8257 char buf[128];
8259 if (is_ibm_extended)
8261 /* NaN and Inf are encoded in the high-order double value
8262 only. The low-order value is not significant. */
8263 type = double_type_node;
8264 mode = DFmode;
8265 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8267 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8268 real_from_string (&r, buf);
8269 result = build_call_expr (isgr_fn, 2,
8270 fold_build1_loc (loc, ABS_EXPR, type, arg),
8271 build_real (type, r));
8272 return result;
8274 CASE_FLT_FN (BUILT_IN_FINITE):
8275 case BUILT_IN_ISFINITE:
8277 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8278 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8279 tree type = TREE_TYPE (arg);
8280 REAL_VALUE_TYPE r;
8281 char buf[128];
8283 if (is_ibm_extended)
8285 /* NaN and Inf are encoded in the high-order double value
8286 only. The low-order value is not significant. */
8287 type = double_type_node;
8288 mode = DFmode;
8289 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8291 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8292 real_from_string (&r, buf);
8293 result = build_call_expr (isle_fn, 2,
8294 fold_build1_loc (loc, ABS_EXPR, type, arg),
8295 build_real (type, r));
8296 /*result = fold_build2_loc (loc, UNGT_EXPR,
8297 TREE_TYPE (TREE_TYPE (fndecl)),
8298 fold_build1_loc (loc, ABS_EXPR, type, arg),
8299 build_real (type, r));
8300 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8301 TREE_TYPE (TREE_TYPE (fndecl)),
8302 result);*/
8303 return result;
8305 case BUILT_IN_ISNORMAL:
8307 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8308 islessequal(fabs(x),DBL_MAX). */
8309 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8310 tree type = TREE_TYPE (arg);
8311 tree orig_arg, max_exp, min_exp;
8312 machine_mode orig_mode = mode;
8313 REAL_VALUE_TYPE rmax, rmin;
8314 char buf[128];
8316 orig_arg = arg = builtin_save_expr (arg);
8317 if (is_ibm_extended)
8319 /* Use double to test the normal range of IBM extended
8320 precision. Emin for IBM extended precision is
8321 different to emin for IEEE double, being 53 higher
8322 since the low double exponent is at least 53 lower
8323 than the high double exponent. */
8324 type = double_type_node;
8325 mode = DFmode;
8326 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8328 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8330 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8331 real_from_string (&rmax, buf);
8332 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8333 real_from_string (&rmin, buf);
8334 max_exp = build_real (type, rmax);
8335 min_exp = build_real (type, rmin);
8337 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8338 if (is_ibm_extended)
8340 /* Testing the high end of the range is done just using
8341 the high double, using the same test as isfinite().
8342 For the subnormal end of the range we first test the
8343 high double, then if its magnitude is equal to the
8344 limit of 0x1p-969, we test whether the low double is
8345 non-zero and opposite sign to the high double. */
8346 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8347 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8348 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8349 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8350 arg, min_exp);
8351 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8352 complex_double_type_node, orig_arg);
8353 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8354 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8355 tree zero = build_real (type, dconst0);
8356 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8357 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8358 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8359 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8360 fold_build3 (COND_EXPR,
8361 integer_type_node,
8362 hilt, logt, lolt));
8363 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8364 eq_min, ok_lo);
8365 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8366 gt_min, eq_min);
8368 else
8370 tree const isge_fn
8371 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8372 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8374 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8375 max_exp, min_exp);
8376 return result;
8378 default:
8379 break;
8382 return NULL_TREE;
8385 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8386 ARG is the argument for the call. */
8388 static tree
8389 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8391 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8393 if (!validate_arg (arg, REAL_TYPE))
8394 return NULL_TREE;
8396 switch (builtin_index)
8398 case BUILT_IN_ISINF:
8399 if (!HONOR_INFINITIES (arg))
8400 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8402 return NULL_TREE;
8404 case BUILT_IN_ISINF_SIGN:
8406 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8407 /* In a boolean context, GCC will fold the inner COND_EXPR to
8408 1. So e.g. "if (isinf_sign(x))" would be folded to just
8409 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8410 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8411 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8412 tree tmp = NULL_TREE;
8414 arg = builtin_save_expr (arg);
8416 if (signbit_fn && isinf_fn)
8418 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8419 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8421 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8422 signbit_call, integer_zero_node);
8423 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8424 isinf_call, integer_zero_node);
8426 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8427 integer_minus_one_node, integer_one_node);
8428 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8429 isinf_call, tmp,
8430 integer_zero_node);
8433 return tmp;
8436 case BUILT_IN_ISFINITE:
8437 if (!HONOR_NANS (arg)
8438 && !HONOR_INFINITIES (arg))
8439 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8441 return NULL_TREE;
8443 case BUILT_IN_ISNAN:
8444 if (!HONOR_NANS (arg))
8445 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8448 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8449 if (is_ibm_extended)
8451 /* NaN and Inf are encoded in the high-order double value
8452 only. The low-order value is not significant. */
8453 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8456 arg = builtin_save_expr (arg);
8457 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8459 default:
8460 gcc_unreachable ();
8464 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8465 This builtin will generate code to return the appropriate floating
8466 point classification depending on the value of the floating point
8467 number passed in. The possible return values must be supplied as
8468 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8469 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8470 one floating point argument which is "type generic". */
8472 static tree
8473 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8475 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8476 arg, type, res, tmp;
8477 machine_mode mode;
8478 REAL_VALUE_TYPE r;
8479 char buf[128];
8481 /* Verify the required arguments in the original call. */
8482 if (nargs != 6
8483 || !validate_arg (args[0], INTEGER_TYPE)
8484 || !validate_arg (args[1], INTEGER_TYPE)
8485 || !validate_arg (args[2], INTEGER_TYPE)
8486 || !validate_arg (args[3], INTEGER_TYPE)
8487 || !validate_arg (args[4], INTEGER_TYPE)
8488 || !validate_arg (args[5], REAL_TYPE))
8489 return NULL_TREE;
8491 fp_nan = args[0];
8492 fp_infinite = args[1];
8493 fp_normal = args[2];
8494 fp_subnormal = args[3];
8495 fp_zero = args[4];
8496 arg = args[5];
8497 type = TREE_TYPE (arg);
8498 mode = TYPE_MODE (type);
8499 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8501 /* fpclassify(x) ->
8502 isnan(x) ? FP_NAN :
8503 (fabs(x) == Inf ? FP_INFINITE :
8504 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8505 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8507 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8508 build_real (type, dconst0));
8509 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8510 tmp, fp_zero, fp_subnormal);
8512 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8513 real_from_string (&r, buf);
8514 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8515 arg, build_real (type, r));
8516 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8518 if (HONOR_INFINITIES (mode))
8520 real_inf (&r);
8521 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8522 build_real (type, r));
8523 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8524 fp_infinite, res);
8527 if (HONOR_NANS (mode))
8529 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8530 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8533 return res;
8536 /* Fold a call to an unordered comparison function such as
8537 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8538 being called and ARG0 and ARG1 are the arguments for the call.
8539 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8540 the opposite of the desired result. UNORDERED_CODE is used
8541 for modes that can hold NaNs and ORDERED_CODE is used for
8542 the rest. */
8544 static tree
8545 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8546 enum tree_code unordered_code,
8547 enum tree_code ordered_code)
8549 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8550 enum tree_code code;
8551 tree type0, type1;
8552 enum tree_code code0, code1;
8553 tree cmp_type = NULL_TREE;
8555 type0 = TREE_TYPE (arg0);
8556 type1 = TREE_TYPE (arg1);
8558 code0 = TREE_CODE (type0);
8559 code1 = TREE_CODE (type1);
8561 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8562 /* Choose the wider of two real types. */
8563 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8564 ? type0 : type1;
8565 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8566 cmp_type = type0;
8567 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8568 cmp_type = type1;
8570 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8571 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8573 if (unordered_code == UNORDERED_EXPR)
8575 if (!HONOR_NANS (arg0))
8576 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8577 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8580 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8581 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8582 fold_build2_loc (loc, code, type, arg0, arg1));
8585 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8586 arithmetics if it can never overflow, or into internal functions that
8587 return both result of arithmetics and overflowed boolean flag in
8588 a complex integer result, or some other check for overflow.
8589 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8590 checking part of that. */
8592 static tree
8593 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8594 tree arg0, tree arg1, tree arg2)
8596 enum internal_fn ifn = IFN_LAST;
8597 /* The code of the expression corresponding to the type-generic
8598 built-in, or ERROR_MARK for the type-specific ones. */
8599 enum tree_code opcode = ERROR_MARK;
8600 bool ovf_only = false;
8602 switch (fcode)
8604 case BUILT_IN_ADD_OVERFLOW_P:
8605 ovf_only = true;
8606 /* FALLTHRU */
8607 case BUILT_IN_ADD_OVERFLOW:
8608 opcode = PLUS_EXPR;
8609 /* FALLTHRU */
8610 case BUILT_IN_SADD_OVERFLOW:
8611 case BUILT_IN_SADDL_OVERFLOW:
8612 case BUILT_IN_SADDLL_OVERFLOW:
8613 case BUILT_IN_UADD_OVERFLOW:
8614 case BUILT_IN_UADDL_OVERFLOW:
8615 case BUILT_IN_UADDLL_OVERFLOW:
8616 ifn = IFN_ADD_OVERFLOW;
8617 break;
8618 case BUILT_IN_SUB_OVERFLOW_P:
8619 ovf_only = true;
8620 /* FALLTHRU */
8621 case BUILT_IN_SUB_OVERFLOW:
8622 opcode = MINUS_EXPR;
8623 /* FALLTHRU */
8624 case BUILT_IN_SSUB_OVERFLOW:
8625 case BUILT_IN_SSUBL_OVERFLOW:
8626 case BUILT_IN_SSUBLL_OVERFLOW:
8627 case BUILT_IN_USUB_OVERFLOW:
8628 case BUILT_IN_USUBL_OVERFLOW:
8629 case BUILT_IN_USUBLL_OVERFLOW:
8630 ifn = IFN_SUB_OVERFLOW;
8631 break;
8632 case BUILT_IN_MUL_OVERFLOW_P:
8633 ovf_only = true;
8634 /* FALLTHRU */
8635 case BUILT_IN_MUL_OVERFLOW:
8636 opcode = MULT_EXPR;
8637 /* FALLTHRU */
8638 case BUILT_IN_SMUL_OVERFLOW:
8639 case BUILT_IN_SMULL_OVERFLOW:
8640 case BUILT_IN_SMULLL_OVERFLOW:
8641 case BUILT_IN_UMUL_OVERFLOW:
8642 case BUILT_IN_UMULL_OVERFLOW:
8643 case BUILT_IN_UMULLL_OVERFLOW:
8644 ifn = IFN_MUL_OVERFLOW;
8645 break;
8646 default:
8647 gcc_unreachable ();
8650 /* For the "generic" overloads, the first two arguments can have different
8651 types and the last argument determines the target type to use to check
8652 for overflow. The arguments of the other overloads all have the same
8653 type. */
8654 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8656 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8657 arguments are constant, attempt to fold the built-in call into a constant
8658 expression indicating whether or not it detected an overflow. */
8659 if (ovf_only
8660 && TREE_CODE (arg0) == INTEGER_CST
8661 && TREE_CODE (arg1) == INTEGER_CST)
8662 /* Perform the computation in the target type and check for overflow. */
8663 return omit_one_operand_loc (loc, boolean_type_node,
8664 arith_overflowed_p (opcode, type, arg0, arg1)
8665 ? boolean_true_node : boolean_false_node,
8666 arg2);
8668 tree ctype = build_complex_type (type);
8669 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8670 2, arg0, arg1);
8671 tree tgt = save_expr (call);
8672 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8673 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8674 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8676 if (ovf_only)
8677 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8679 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8680 tree store
8681 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8682 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8685 /* Fold a call to __builtin_FILE to a constant string. */
8687 static inline tree
8688 fold_builtin_FILE (location_t loc)
8690 if (const char *fname = LOCATION_FILE (loc))
8691 return build_string_literal (strlen (fname) + 1, fname);
8693 return build_string_literal (1, "");
8696 /* Fold a call to __builtin_FUNCTION to a constant string. */
8698 static inline tree
8699 fold_builtin_FUNCTION ()
8701 const char *name = "";
8703 if (current_function_decl)
8704 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8706 return build_string_literal (strlen (name) + 1, name);
8709 /* Fold a call to __builtin_LINE to an integer constant. */
8711 static inline tree
8712 fold_builtin_LINE (location_t loc, tree type)
8714 return build_int_cst (type, LOCATION_LINE (loc));
8717 /* Fold a call to built-in function FNDECL with 0 arguments.
8718 This function returns NULL_TREE if no simplification was possible. */
8720 static tree
8721 fold_builtin_0 (location_t loc, tree fndecl)
8723 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8724 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8725 switch (fcode)
8727 case BUILT_IN_FILE:
8728 return fold_builtin_FILE (loc);
8730 case BUILT_IN_FUNCTION:
8731 return fold_builtin_FUNCTION ();
8733 case BUILT_IN_LINE:
8734 return fold_builtin_LINE (loc, type);
8736 CASE_FLT_FN (BUILT_IN_INF):
8737 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8738 case BUILT_IN_INFD32:
8739 case BUILT_IN_INFD64:
8740 case BUILT_IN_INFD128:
8741 return fold_builtin_inf (loc, type, true);
8743 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8744 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8745 return fold_builtin_inf (loc, type, false);
8747 case BUILT_IN_CLASSIFY_TYPE:
8748 return fold_builtin_classify_type (NULL_TREE);
8750 default:
8751 break;
8753 return NULL_TREE;
8756 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8757 This function returns NULL_TREE if no simplification was possible. */
8759 static tree
8760 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8762 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8763 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8765 if (TREE_CODE (arg0) == ERROR_MARK)
8766 return NULL_TREE;
8768 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8769 return ret;
8771 switch (fcode)
8773 case BUILT_IN_CONSTANT_P:
8775 tree val = fold_builtin_constant_p (arg0);
8777 /* Gimplification will pull the CALL_EXPR for the builtin out of
8778 an if condition. When not optimizing, we'll not CSE it back.
8779 To avoid link error types of regressions, return false now. */
8780 if (!val && !optimize)
8781 val = integer_zero_node;
8783 return val;
8786 case BUILT_IN_CLASSIFY_TYPE:
8787 return fold_builtin_classify_type (arg0);
8789 case BUILT_IN_STRLEN:
8790 return fold_builtin_strlen (loc, type, arg0);
8792 CASE_FLT_FN (BUILT_IN_FABS):
8793 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8794 case BUILT_IN_FABSD32:
8795 case BUILT_IN_FABSD64:
8796 case BUILT_IN_FABSD128:
8797 return fold_builtin_fabs (loc, arg0, type);
8799 case BUILT_IN_ABS:
8800 case BUILT_IN_LABS:
8801 case BUILT_IN_LLABS:
8802 case BUILT_IN_IMAXABS:
8803 return fold_builtin_abs (loc, arg0, type);
8805 CASE_FLT_FN (BUILT_IN_CONJ):
8806 if (validate_arg (arg0, COMPLEX_TYPE)
8807 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8808 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8809 break;
8811 CASE_FLT_FN (BUILT_IN_CREAL):
8812 if (validate_arg (arg0, COMPLEX_TYPE)
8813 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8814 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8815 break;
8817 CASE_FLT_FN (BUILT_IN_CIMAG):
8818 if (validate_arg (arg0, COMPLEX_TYPE)
8819 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8820 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8821 break;
8823 CASE_FLT_FN (BUILT_IN_CARG):
8824 return fold_builtin_carg (loc, arg0, type);
8826 case BUILT_IN_ISASCII:
8827 return fold_builtin_isascii (loc, arg0);
8829 case BUILT_IN_TOASCII:
8830 return fold_builtin_toascii (loc, arg0);
8832 case BUILT_IN_ISDIGIT:
8833 return fold_builtin_isdigit (loc, arg0);
8835 CASE_FLT_FN (BUILT_IN_FINITE):
8836 case BUILT_IN_FINITED32:
8837 case BUILT_IN_FINITED64:
8838 case BUILT_IN_FINITED128:
8839 case BUILT_IN_ISFINITE:
8841 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8842 if (ret)
8843 return ret;
8844 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8847 CASE_FLT_FN (BUILT_IN_ISINF):
8848 case BUILT_IN_ISINFD32:
8849 case BUILT_IN_ISINFD64:
8850 case BUILT_IN_ISINFD128:
8852 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8853 if (ret)
8854 return ret;
8855 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8858 case BUILT_IN_ISNORMAL:
8859 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8861 case BUILT_IN_ISINF_SIGN:
8862 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8864 CASE_FLT_FN (BUILT_IN_ISNAN):
8865 case BUILT_IN_ISNAND32:
8866 case BUILT_IN_ISNAND64:
8867 case BUILT_IN_ISNAND128:
8868 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8870 case BUILT_IN_FREE:
8871 if (integer_zerop (arg0))
8872 return build_empty_stmt (loc);
8873 break;
8875 default:
8876 break;
8879 return NULL_TREE;
8883 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8884 This function returns NULL_TREE if no simplification was possible. */
8886 static tree
8887 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8889 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8890 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8892 if (TREE_CODE (arg0) == ERROR_MARK
8893 || TREE_CODE (arg1) == ERROR_MARK)
8894 return NULL_TREE;
8896 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8897 return ret;
8899 switch (fcode)
8901 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8902 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8903 if (validate_arg (arg0, REAL_TYPE)
8904 && validate_arg (arg1, POINTER_TYPE))
8905 return do_mpfr_lgamma_r (arg0, arg1, type);
8906 break;
8908 CASE_FLT_FN (BUILT_IN_FREXP):
8909 return fold_builtin_frexp (loc, arg0, arg1, type);
8911 CASE_FLT_FN (BUILT_IN_MODF):
8912 return fold_builtin_modf (loc, arg0, arg1, type);
8914 case BUILT_IN_STRSPN:
8915 return fold_builtin_strspn (loc, arg0, arg1);
8917 case BUILT_IN_STRCSPN:
8918 return fold_builtin_strcspn (loc, arg0, arg1);
8920 case BUILT_IN_STRPBRK:
8921 return fold_builtin_strpbrk (loc, arg0, arg1, type);
8923 case BUILT_IN_EXPECT:
8924 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8926 case BUILT_IN_ISGREATER:
8927 return fold_builtin_unordered_cmp (loc, fndecl,
8928 arg0, arg1, UNLE_EXPR, LE_EXPR);
8929 case BUILT_IN_ISGREATEREQUAL:
8930 return fold_builtin_unordered_cmp (loc, fndecl,
8931 arg0, arg1, UNLT_EXPR, LT_EXPR);
8932 case BUILT_IN_ISLESS:
8933 return fold_builtin_unordered_cmp (loc, fndecl,
8934 arg0, arg1, UNGE_EXPR, GE_EXPR);
8935 case BUILT_IN_ISLESSEQUAL:
8936 return fold_builtin_unordered_cmp (loc, fndecl,
8937 arg0, arg1, UNGT_EXPR, GT_EXPR);
8938 case BUILT_IN_ISLESSGREATER:
8939 return fold_builtin_unordered_cmp (loc, fndecl,
8940 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8941 case BUILT_IN_ISUNORDERED:
8942 return fold_builtin_unordered_cmp (loc, fndecl,
8943 arg0, arg1, UNORDERED_EXPR,
8944 NOP_EXPR);
8946 /* We do the folding for va_start in the expander. */
8947 case BUILT_IN_VA_START:
8948 break;
8950 case BUILT_IN_OBJECT_SIZE:
8951 return fold_builtin_object_size (arg0, arg1);
8953 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8954 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8956 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8957 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8959 default:
8960 break;
8962 return NULL_TREE;
8965 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8966 and ARG2.
8967 This function returns NULL_TREE if no simplification was possible. */
8969 static tree
8970 fold_builtin_3 (location_t loc, tree fndecl,
8971 tree arg0, tree arg1, tree arg2)
8973 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8974 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8976 if (TREE_CODE (arg0) == ERROR_MARK
8977 || TREE_CODE (arg1) == ERROR_MARK
8978 || TREE_CODE (arg2) == ERROR_MARK)
8979 return NULL_TREE;
8981 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8982 arg0, arg1, arg2))
8983 return ret;
8985 switch (fcode)
8988 CASE_FLT_FN (BUILT_IN_SINCOS):
8989 return fold_builtin_sincos (loc, arg0, arg1, arg2);
8991 CASE_FLT_FN (BUILT_IN_FMA):
8992 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8994 CASE_FLT_FN (BUILT_IN_REMQUO):
8995 if (validate_arg (arg0, REAL_TYPE)
8996 && validate_arg (arg1, REAL_TYPE)
8997 && validate_arg (arg2, POINTER_TYPE))
8998 return do_mpfr_remquo (arg0, arg1, arg2);
8999 break;
9001 case BUILT_IN_MEMCMP:
9002 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
9004 case BUILT_IN_EXPECT:
9005 return fold_builtin_expect (loc, arg0, arg1, arg2);
9007 case BUILT_IN_ADD_OVERFLOW:
9008 case BUILT_IN_SUB_OVERFLOW:
9009 case BUILT_IN_MUL_OVERFLOW:
9010 case BUILT_IN_ADD_OVERFLOW_P:
9011 case BUILT_IN_SUB_OVERFLOW_P:
9012 case BUILT_IN_MUL_OVERFLOW_P:
9013 case BUILT_IN_SADD_OVERFLOW:
9014 case BUILT_IN_SADDL_OVERFLOW:
9015 case BUILT_IN_SADDLL_OVERFLOW:
9016 case BUILT_IN_SSUB_OVERFLOW:
9017 case BUILT_IN_SSUBL_OVERFLOW:
9018 case BUILT_IN_SSUBLL_OVERFLOW:
9019 case BUILT_IN_SMUL_OVERFLOW:
9020 case BUILT_IN_SMULL_OVERFLOW:
9021 case BUILT_IN_SMULLL_OVERFLOW:
9022 case BUILT_IN_UADD_OVERFLOW:
9023 case BUILT_IN_UADDL_OVERFLOW:
9024 case BUILT_IN_UADDLL_OVERFLOW:
9025 case BUILT_IN_USUB_OVERFLOW:
9026 case BUILT_IN_USUBL_OVERFLOW:
9027 case BUILT_IN_USUBLL_OVERFLOW:
9028 case BUILT_IN_UMUL_OVERFLOW:
9029 case BUILT_IN_UMULL_OVERFLOW:
9030 case BUILT_IN_UMULLL_OVERFLOW:
9031 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9033 default:
9034 break;
9036 return NULL_TREE;
9039 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9040 arguments. IGNORE is true if the result of the
9041 function call is ignored. This function returns NULL_TREE if no
9042 simplification was possible. */
9044 tree
9045 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9047 tree ret = NULL_TREE;
9049 switch (nargs)
9051 case 0:
9052 ret = fold_builtin_0 (loc, fndecl);
9053 break;
9054 case 1:
9055 ret = fold_builtin_1 (loc, fndecl, args[0]);
9056 break;
9057 case 2:
9058 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9059 break;
9060 case 3:
9061 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9062 break;
9063 default:
9064 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9065 break;
9067 if (ret)
9069 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9070 SET_EXPR_LOCATION (ret, loc);
9071 TREE_NO_WARNING (ret) = 1;
9072 return ret;
9074 return NULL_TREE;
9077 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9078 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9079 of arguments in ARGS to be omitted. OLDNARGS is the number of
9080 elements in ARGS. */
9082 static tree
9083 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9084 int skip, tree fndecl, int n, va_list newargs)
9086 int nargs = oldnargs - skip + n;
9087 tree *buffer;
9089 if (n > 0)
9091 int i, j;
9093 buffer = XALLOCAVEC (tree, nargs);
9094 for (i = 0; i < n; i++)
9095 buffer[i] = va_arg (newargs, tree);
9096 for (j = skip; j < oldnargs; j++, i++)
9097 buffer[i] = args[j];
9099 else
9100 buffer = args + skip;
9102 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9105 /* Return true if FNDECL shouldn't be folded right now.
9106 If a built-in function has an inline attribute always_inline
9107 wrapper, defer folding it after always_inline functions have
9108 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9109 might not be performed. */
9111 bool
9112 avoid_folding_inline_builtin (tree fndecl)
9114 return (DECL_DECLARED_INLINE_P (fndecl)
9115 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9116 && cfun
9117 && !cfun->always_inline_functions_inlined
9118 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9121 /* A wrapper function for builtin folding that prevents warnings for
9122 "statement without effect" and the like, caused by removing the
9123 call node earlier than the warning is generated. */
9125 tree
9126 fold_call_expr (location_t loc, tree exp, bool ignore)
9128 tree ret = NULL_TREE;
9129 tree fndecl = get_callee_fndecl (exp);
9130 if (fndecl
9131 && TREE_CODE (fndecl) == FUNCTION_DECL
9132 && DECL_BUILT_IN (fndecl)
9133 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9134 yet. Defer folding until we see all the arguments
9135 (after inlining). */
9136 && !CALL_EXPR_VA_ARG_PACK (exp))
9138 int nargs = call_expr_nargs (exp);
9140 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9141 instead last argument is __builtin_va_arg_pack (). Defer folding
9142 even in that case, until arguments are finalized. */
9143 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9145 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9146 if (fndecl2
9147 && TREE_CODE (fndecl2) == FUNCTION_DECL
9148 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9149 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9150 return NULL_TREE;
9153 if (avoid_folding_inline_builtin (fndecl))
9154 return NULL_TREE;
9156 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9157 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9158 CALL_EXPR_ARGP (exp), ignore);
9159 else
9161 tree *args = CALL_EXPR_ARGP (exp);
9162 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9163 if (ret)
9164 return ret;
9167 return NULL_TREE;
9170 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9171 N arguments are passed in the array ARGARRAY. Return a folded
9172 expression or NULL_TREE if no simplification was possible. */
9174 tree
9175 fold_builtin_call_array (location_t loc, tree,
9176 tree fn,
9177 int n,
9178 tree *argarray)
9180 if (TREE_CODE (fn) != ADDR_EXPR)
9181 return NULL_TREE;
9183 tree fndecl = TREE_OPERAND (fn, 0);
9184 if (TREE_CODE (fndecl) == FUNCTION_DECL
9185 && DECL_BUILT_IN (fndecl))
9187 /* If last argument is __builtin_va_arg_pack (), arguments to this
9188 function are not finalized yet. Defer folding until they are. */
9189 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9191 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9192 if (fndecl2
9193 && TREE_CODE (fndecl2) == FUNCTION_DECL
9194 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9195 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9196 return NULL_TREE;
9198 if (avoid_folding_inline_builtin (fndecl))
9199 return NULL_TREE;
9200 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9201 return targetm.fold_builtin (fndecl, n, argarray, false);
9202 else
9203 return fold_builtin_n (loc, fndecl, argarray, n, false);
9206 return NULL_TREE;
9209 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9210 along with N new arguments specified as the "..." parameters. SKIP
9211 is the number of arguments in EXP to be omitted. This function is used
9212 to do varargs-to-varargs transformations. */
9214 static tree
9215 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9217 va_list ap;
9218 tree t;
9220 va_start (ap, n);
9221 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9222 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9223 va_end (ap);
9225 return t;
9228 /* Validate a single argument ARG against a tree code CODE representing
9229 a type. Return true when argument is valid. */
9231 static bool
9232 validate_arg (const_tree arg, enum tree_code code)
9234 if (!arg)
9235 return false;
9236 else if (code == POINTER_TYPE)
9237 return POINTER_TYPE_P (TREE_TYPE (arg));
9238 else if (code == INTEGER_TYPE)
9239 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9240 return code == TREE_CODE (TREE_TYPE (arg));
9243 /* This function validates the types of a function call argument list
9244 against a specified list of tree_codes. If the last specifier is a 0,
9245 that represents an ellipses, otherwise the last specifier must be a
9246 VOID_TYPE.
9248 This is the GIMPLE version of validate_arglist. Eventually we want to
9249 completely convert builtins.c to work from GIMPLEs and the tree based
9250 validate_arglist will then be removed. */
9252 bool
9253 validate_gimple_arglist (const gcall *call, ...)
9255 enum tree_code code;
9256 bool res = 0;
9257 va_list ap;
9258 const_tree arg;
9259 size_t i;
9261 va_start (ap, call);
9262 i = 0;
9266 code = (enum tree_code) va_arg (ap, int);
9267 switch (code)
9269 case 0:
9270 /* This signifies an ellipses, any further arguments are all ok. */
9271 res = true;
9272 goto end;
9273 case VOID_TYPE:
9274 /* This signifies an endlink, if no arguments remain, return
9275 true, otherwise return false. */
9276 res = (i == gimple_call_num_args (call));
9277 goto end;
9278 default:
9279 /* If no parameters remain or the parameter's code does not
9280 match the specified code, return false. Otherwise continue
9281 checking any remaining arguments. */
9282 arg = gimple_call_arg (call, i++);
9283 if (!validate_arg (arg, code))
9284 goto end;
9285 break;
9288 while (1);
9290 /* We need gotos here since we can only have one VA_CLOSE in a
9291 function. */
9292 end: ;
9293 va_end (ap);
9295 return res;
9298 /* Default target-specific builtin expander that does nothing. */
9301 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9302 rtx target ATTRIBUTE_UNUSED,
9303 rtx subtarget ATTRIBUTE_UNUSED,
9304 machine_mode mode ATTRIBUTE_UNUSED,
9305 int ignore ATTRIBUTE_UNUSED)
9307 return NULL_RTX;
9310 /* Returns true is EXP represents data that would potentially reside
9311 in a readonly section. */
9313 bool
9314 readonly_data_expr (tree exp)
9316 STRIP_NOPS (exp);
9318 if (TREE_CODE (exp) != ADDR_EXPR)
9319 return false;
9321 exp = get_base_address (TREE_OPERAND (exp, 0));
9322 if (!exp)
9323 return false;
9325 /* Make sure we call decl_readonly_section only for trees it
9326 can handle (since it returns true for everything it doesn't
9327 understand). */
9328 if (TREE_CODE (exp) == STRING_CST
9329 || TREE_CODE (exp) == CONSTRUCTOR
9330 || (VAR_P (exp) && TREE_STATIC (exp)))
9331 return decl_readonly_section (exp, 0);
9332 else
9333 return false;
9336 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9337 to the call, and TYPE is its return type.
9339 Return NULL_TREE if no simplification was possible, otherwise return the
9340 simplified form of the call as a tree.
9342 The simplified form may be a constant or other expression which
9343 computes the same value, but in a more efficient manner (including
9344 calls to other builtin functions).
9346 The call may contain arguments which need to be evaluated, but
9347 which are not useful to determine the result of the call. In
9348 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9349 COMPOUND_EXPR will be an argument which must be evaluated.
9350 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9351 COMPOUND_EXPR in the chain will contain the tree for the simplified
9352 form of the builtin function call. */
9354 static tree
9355 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9357 if (!validate_arg (s1, POINTER_TYPE)
9358 || !validate_arg (s2, POINTER_TYPE))
9359 return NULL_TREE;
9360 else
9362 tree fn;
9363 const char *p1, *p2;
9365 p2 = c_getstr (s2);
9366 if (p2 == NULL)
9367 return NULL_TREE;
9369 p1 = c_getstr (s1);
9370 if (p1 != NULL)
9372 const char *r = strpbrk (p1, p2);
9373 tree tem;
9375 if (r == NULL)
9376 return build_int_cst (TREE_TYPE (s1), 0);
9378 /* Return an offset into the constant string argument. */
9379 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9380 return fold_convert_loc (loc, type, tem);
9383 if (p2[0] == '\0')
9384 /* strpbrk(x, "") == NULL.
9385 Evaluate and ignore s1 in case it had side-effects. */
9386 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
9388 if (p2[1] != '\0')
9389 return NULL_TREE; /* Really call strpbrk. */
9391 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9392 if (!fn)
9393 return NULL_TREE;
9395 /* New argument list transforming strpbrk(s1, s2) to
9396 strchr(s1, s2[0]). */
9397 return build_call_expr_loc (loc, fn, 2, s1,
9398 build_int_cst (integer_type_node, p2[0]));
9402 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9403 to the call.
9405 Return NULL_TREE if no simplification was possible, otherwise return the
9406 simplified form of the call as a tree.
9408 The simplified form may be a constant or other expression which
9409 computes the same value, but in a more efficient manner (including
9410 calls to other builtin functions).
9412 The call may contain arguments which need to be evaluated, but
9413 which are not useful to determine the result of the call. In
9414 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9415 COMPOUND_EXPR will be an argument which must be evaluated.
9416 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9417 COMPOUND_EXPR in the chain will contain the tree for the simplified
9418 form of the builtin function call. */
9420 static tree
9421 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9423 if (!validate_arg (s1, POINTER_TYPE)
9424 || !validate_arg (s2, POINTER_TYPE))
9425 return NULL_TREE;
9426 else
9428 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9430 /* If either argument is "", return NULL_TREE. */
9431 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9432 /* Evaluate and ignore both arguments in case either one has
9433 side-effects. */
9434 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9435 s1, s2);
9436 return NULL_TREE;
9440 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9441 to the call.
9443 Return NULL_TREE if no simplification was possible, otherwise return the
9444 simplified form of the call as a tree.
9446 The simplified form may be a constant or other expression which
9447 computes the same value, but in a more efficient manner (including
9448 calls to other builtin functions).
9450 The call may contain arguments which need to be evaluated, but
9451 which are not useful to determine the result of the call. In
9452 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9453 COMPOUND_EXPR will be an argument which must be evaluated.
9454 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9455 COMPOUND_EXPR in the chain will contain the tree for the simplified
9456 form of the builtin function call. */
9458 static tree
9459 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9461 if (!validate_arg (s1, POINTER_TYPE)
9462 || !validate_arg (s2, POINTER_TYPE))
9463 return NULL_TREE;
9464 else
9466 /* If the first argument is "", return NULL_TREE. */
9467 const char *p1 = c_getstr (s1);
9468 if (p1 && *p1 == '\0')
9470 /* Evaluate and ignore argument s2 in case it has
9471 side-effects. */
9472 return omit_one_operand_loc (loc, size_type_node,
9473 size_zero_node, s2);
9476 /* If the second argument is "", return __builtin_strlen(s1). */
9477 const char *p2 = c_getstr (s2);
9478 if (p2 && *p2 == '\0')
9480 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9482 /* If the replacement _DECL isn't initialized, don't do the
9483 transformation. */
9484 if (!fn)
9485 return NULL_TREE;
9487 return build_call_expr_loc (loc, fn, 1, s1);
9489 return NULL_TREE;
9493 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
9494 produced. False otherwise. This is done so that we don't output the error
9495 or warning twice or three times. */
9497 bool
9498 fold_builtin_next_arg (tree exp, bool va_start_p)
9500 tree fntype = TREE_TYPE (current_function_decl);
9501 int nargs = call_expr_nargs (exp);
9502 tree arg;
9503 /* There is good chance the current input_location points inside the
9504 definition of the va_start macro (perhaps on the token for
9505 builtin) in a system header, so warnings will not be emitted.
9506 Use the location in real source code. */
9507 source_location current_location =
9508 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9509 NULL);
9511 if (!stdarg_p (fntype))
9513 error ("%<va_start%> used in function with fixed args");
9514 return true;
9517 if (va_start_p)
9519 if (va_start_p && (nargs != 2))
9521 error ("wrong number of arguments to function %<va_start%>");
9522 return true;
9524 arg = CALL_EXPR_ARG (exp, 1);
9526 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9527 when we checked the arguments and if needed issued a warning. */
9528 else
9530 if (nargs == 0)
9532 /* Evidently an out of date version of <stdarg.h>; can't validate
9533 va_start's second argument, but can still work as intended. */
9534 warning_at (current_location,
9535 OPT_Wvarargs,
9536 "%<__builtin_next_arg%> called without an argument");
9537 return true;
9539 else if (nargs > 1)
9541 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9542 return true;
9544 arg = CALL_EXPR_ARG (exp, 0);
9547 if (TREE_CODE (arg) == SSA_NAME)
9548 arg = SSA_NAME_VAR (arg);
9550 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9551 or __builtin_next_arg (0) the first time we see it, after checking
9552 the arguments and if needed issuing a warning. */
9553 if (!integer_zerop (arg))
9555 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9557 /* Strip off all nops for the sake of the comparison. This
9558 is not quite the same as STRIP_NOPS. It does more.
9559 We must also strip off INDIRECT_EXPR for C++ reference
9560 parameters. */
9561 while (CONVERT_EXPR_P (arg)
9562 || TREE_CODE (arg) == INDIRECT_REF)
9563 arg = TREE_OPERAND (arg, 0);
9564 if (arg != last_parm)
9566 /* FIXME: Sometimes with the tree optimizers we can get the
9567 not the last argument even though the user used the last
9568 argument. We just warn and set the arg to be the last
9569 argument so that we will get wrong-code because of
9570 it. */
9571 warning_at (current_location,
9572 OPT_Wvarargs,
9573 "second parameter of %<va_start%> not last named argument");
9576 /* Undefined by C99 7.15.1.4p4 (va_start):
9577 "If the parameter parmN is declared with the register storage
9578 class, with a function or array type, or with a type that is
9579 not compatible with the type that results after application of
9580 the default argument promotions, the behavior is undefined."
9582 else if (DECL_REGISTER (arg))
9584 warning_at (current_location,
9585 OPT_Wvarargs,
9586 "undefined behavior when second parameter of "
9587 "%<va_start%> is declared with %<register%> storage");
9590 /* We want to verify the second parameter just once before the tree
9591 optimizers are run and then avoid keeping it in the tree,
9592 as otherwise we could warn even for correct code like:
9593 void foo (int i, ...)
9594 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9595 if (va_start_p)
9596 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9597 else
9598 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9600 return false;
9604 /* Expand a call EXP to __builtin_object_size. */
9606 static rtx
9607 expand_builtin_object_size (tree exp)
9609 tree ost;
9610 int object_size_type;
9611 tree fndecl = get_callee_fndecl (exp);
9613 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9615 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9616 exp, fndecl);
9617 expand_builtin_trap ();
9618 return const0_rtx;
9621 ost = CALL_EXPR_ARG (exp, 1);
9622 STRIP_NOPS (ost);
9624 if (TREE_CODE (ost) != INTEGER_CST
9625 || tree_int_cst_sgn (ost) < 0
9626 || compare_tree_int (ost, 3) > 0)
9628 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9629 exp, fndecl);
9630 expand_builtin_trap ();
9631 return const0_rtx;
9634 object_size_type = tree_to_shwi (ost);
9636 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9639 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9640 FCODE is the BUILT_IN_* to use.
9641 Return NULL_RTX if we failed; the caller should emit a normal call,
9642 otherwise try to get the result in TARGET, if convenient (and in
9643 mode MODE if that's convenient). */
9645 static rtx
9646 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9647 enum built_in_function fcode)
9649 tree dest, src, len, size;
9651 if (!validate_arglist (exp,
9652 POINTER_TYPE,
9653 fcode == BUILT_IN_MEMSET_CHK
9654 ? INTEGER_TYPE : POINTER_TYPE,
9655 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9656 return NULL_RTX;
9658 dest = CALL_EXPR_ARG (exp, 0);
9659 src = CALL_EXPR_ARG (exp, 1);
9660 len = CALL_EXPR_ARG (exp, 2);
9661 size = CALL_EXPR_ARG (exp, 3);
9663 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9664 exp, len, /*maxlen=*/NULL_TREE,
9665 /*str=*/NULL_TREE, size);
9667 if (!tree_fits_uhwi_p (size))
9668 return NULL_RTX;
9670 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9672 /* Avoid transforming the checking call to an ordinary one when
9673 an overflow has been detected or when the call couldn't be
9674 validated because the size is not constant. */
9675 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9676 return NULL_RTX;
9678 tree fn = NULL_TREE;
9679 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9680 mem{cpy,pcpy,move,set} is available. */
9681 switch (fcode)
9683 case BUILT_IN_MEMCPY_CHK:
9684 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9685 break;
9686 case BUILT_IN_MEMPCPY_CHK:
9687 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9688 break;
9689 case BUILT_IN_MEMMOVE_CHK:
9690 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9691 break;
9692 case BUILT_IN_MEMSET_CHK:
9693 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9694 break;
9695 default:
9696 break;
9699 if (! fn)
9700 return NULL_RTX;
9702 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9703 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9704 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9705 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9707 else if (fcode == BUILT_IN_MEMSET_CHK)
9708 return NULL_RTX;
9709 else
9711 unsigned int dest_align = get_pointer_alignment (dest);
9713 /* If DEST is not a pointer type, call the normal function. */
9714 if (dest_align == 0)
9715 return NULL_RTX;
9717 /* If SRC and DEST are the same (and not volatile), do nothing. */
9718 if (operand_equal_p (src, dest, 0))
9720 tree expr;
9722 if (fcode != BUILT_IN_MEMPCPY_CHK)
9724 /* Evaluate and ignore LEN in case it has side-effects. */
9725 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9726 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9729 expr = fold_build_pointer_plus (dest, len);
9730 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9733 /* __memmove_chk special case. */
9734 if (fcode == BUILT_IN_MEMMOVE_CHK)
9736 unsigned int src_align = get_pointer_alignment (src);
9738 if (src_align == 0)
9739 return NULL_RTX;
9741 /* If src is categorized for a readonly section we can use
9742 normal __memcpy_chk. */
9743 if (readonly_data_expr (src))
9745 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9746 if (!fn)
9747 return NULL_RTX;
9748 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9749 dest, src, len, size);
9750 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9751 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9752 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9755 return NULL_RTX;
9759 /* Emit warning if a buffer overflow is detected at compile time. */
9761 static void
9762 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9764 /* The source string. */
9765 tree srcstr = NULL_TREE;
9766 /* The size of the destination object. */
9767 tree objsize = NULL_TREE;
9768 /* The string that is being concatenated with (as in __strcat_chk)
9769 or null if it isn't. */
9770 tree catstr = NULL_TREE;
9771 /* The maximum length of the source sequence in a bounded operation
9772 (such as __strncat_chk) or null if the operation isn't bounded
9773 (such as __strcat_chk). */
9774 tree maxlen = NULL_TREE;
9776 switch (fcode)
9778 case BUILT_IN_STRCPY_CHK:
9779 case BUILT_IN_STPCPY_CHK:
9780 srcstr = CALL_EXPR_ARG (exp, 1);
9781 objsize = CALL_EXPR_ARG (exp, 2);
9782 break;
9784 case BUILT_IN_STRCAT_CHK:
9785 /* For __strcat_chk the warning will be emitted only if overflowing
9786 by at least strlen (dest) + 1 bytes. */
9787 catstr = CALL_EXPR_ARG (exp, 0);
9788 srcstr = CALL_EXPR_ARG (exp, 1);
9789 objsize = CALL_EXPR_ARG (exp, 2);
9790 break;
9792 case BUILT_IN_STRNCAT_CHK:
9793 catstr = CALL_EXPR_ARG (exp, 0);
9794 srcstr = CALL_EXPR_ARG (exp, 1);
9795 maxlen = CALL_EXPR_ARG (exp, 2);
9796 objsize = CALL_EXPR_ARG (exp, 3);
9797 break;
9799 case BUILT_IN_STRNCPY_CHK:
9800 case BUILT_IN_STPNCPY_CHK:
9801 srcstr = CALL_EXPR_ARG (exp, 1);
9802 maxlen = CALL_EXPR_ARG (exp, 2);
9803 objsize = CALL_EXPR_ARG (exp, 3);
9804 break;
9806 case BUILT_IN_SNPRINTF_CHK:
9807 case BUILT_IN_VSNPRINTF_CHK:
9808 maxlen = CALL_EXPR_ARG (exp, 1);
9809 objsize = CALL_EXPR_ARG (exp, 3);
9810 break;
9811 default:
9812 gcc_unreachable ();
9815 if (catstr && maxlen)
9817 /* Check __strncat_chk. There is no way to determine the length
9818 of the string to which the source string is being appended so
9819 just warn when the length of the source string is not known. */
9820 check_strncat_sizes (exp, objsize);
9821 return;
9824 check_sizes (OPT_Wstringop_overflow_, exp,
9825 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
9828 /* Emit warning if a buffer overflow is detected at compile time
9829 in __sprintf_chk/__vsprintf_chk calls. */
9831 static void
9832 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9834 tree size, len, fmt;
9835 const char *fmt_str;
9836 int nargs = call_expr_nargs (exp);
9838 /* Verify the required arguments in the original call. */
9840 if (nargs < 4)
9841 return;
9842 size = CALL_EXPR_ARG (exp, 2);
9843 fmt = CALL_EXPR_ARG (exp, 3);
9845 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9846 return;
9848 /* Check whether the format is a literal string constant. */
9849 fmt_str = c_getstr (fmt);
9850 if (fmt_str == NULL)
9851 return;
9853 if (!init_target_chars ())
9854 return;
9856 /* If the format doesn't contain % args or %%, we know its size. */
9857 if (strchr (fmt_str, target_percent) == 0)
9858 len = build_int_cstu (size_type_node, strlen (fmt_str));
9859 /* If the format is "%s" and first ... argument is a string literal,
9860 we know it too. */
9861 else if (fcode == BUILT_IN_SPRINTF_CHK
9862 && strcmp (fmt_str, target_percent_s) == 0)
9864 tree arg;
9866 if (nargs < 5)
9867 return;
9868 arg = CALL_EXPR_ARG (exp, 4);
9869 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9870 return;
9872 len = c_strlen (arg, 1);
9873 if (!len || ! tree_fits_uhwi_p (len))
9874 return;
9876 else
9877 return;
9879 /* Add one for the terminating nul. */
9880 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9881 check_sizes (OPT_Wstringop_overflow_,
9882 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
9885 /* Emit warning if a free is called with address of a variable. */
9887 static void
9888 maybe_emit_free_warning (tree exp)
9890 tree arg = CALL_EXPR_ARG (exp, 0);
9892 STRIP_NOPS (arg);
9893 if (TREE_CODE (arg) != ADDR_EXPR)
9894 return;
9896 arg = get_base_address (TREE_OPERAND (arg, 0));
9897 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
9898 return;
9900 if (SSA_VAR_P (arg))
9901 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9902 "%Kattempt to free a non-heap object %qD", exp, arg);
9903 else
9904 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9905 "%Kattempt to free a non-heap object", exp);
9908 /* Fold a call to __builtin_object_size with arguments PTR and OST,
9909 if possible. */
9911 static tree
9912 fold_builtin_object_size (tree ptr, tree ost)
9914 unsigned HOST_WIDE_INT bytes;
9915 int object_size_type;
9917 if (!validate_arg (ptr, POINTER_TYPE)
9918 || !validate_arg (ost, INTEGER_TYPE))
9919 return NULL_TREE;
9921 STRIP_NOPS (ost);
9923 if (TREE_CODE (ost) != INTEGER_CST
9924 || tree_int_cst_sgn (ost) < 0
9925 || compare_tree_int (ost, 3) > 0)
9926 return NULL_TREE;
9928 object_size_type = tree_to_shwi (ost);
9930 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9931 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9932 and (size_t) 0 for types 2 and 3. */
9933 if (TREE_SIDE_EFFECTS (ptr))
9934 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
9936 if (TREE_CODE (ptr) == ADDR_EXPR)
9938 compute_builtin_object_size (ptr, object_size_type, &bytes);
9939 if (wi::fits_to_tree_p (bytes, size_type_node))
9940 return build_int_cstu (size_type_node, bytes);
9942 else if (TREE_CODE (ptr) == SSA_NAME)
9944 /* If object size is not known yet, delay folding until
9945 later. Maybe subsequent passes will help determining
9946 it. */
9947 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9948 && wi::fits_to_tree_p (bytes, size_type_node))
9949 return build_int_cstu (size_type_node, bytes);
9952 return NULL_TREE;
9955 /* Builtins with folding operations that operate on "..." arguments
9956 need special handling; we need to store the arguments in a convenient
9957 data structure before attempting any folding. Fortunately there are
9958 only a few builtins that fall into this category. FNDECL is the
9959 function, EXP is the CALL_EXPR for the call. */
9961 static tree
9962 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9964 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9965 tree ret = NULL_TREE;
9967 switch (fcode)
9969 case BUILT_IN_FPCLASSIFY:
9970 ret = fold_builtin_fpclassify (loc, args, nargs);
9971 break;
9973 default:
9974 break;
9976 if (ret)
9978 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9979 SET_EXPR_LOCATION (ret, loc);
9980 TREE_NO_WARNING (ret) = 1;
9981 return ret;
9983 return NULL_TREE;
9986 /* Initialize format string characters in the target charset. */
9988 bool
9989 init_target_chars (void)
9991 static bool init;
9992 if (!init)
9994 target_newline = lang_hooks.to_target_charset ('\n');
9995 target_percent = lang_hooks.to_target_charset ('%');
9996 target_c = lang_hooks.to_target_charset ('c');
9997 target_s = lang_hooks.to_target_charset ('s');
9998 if (target_newline == 0 || target_percent == 0 || target_c == 0
9999 || target_s == 0)
10000 return false;
10002 target_percent_c[0] = target_percent;
10003 target_percent_c[1] = target_c;
10004 target_percent_c[2] = '\0';
10006 target_percent_s[0] = target_percent;
10007 target_percent_s[1] = target_s;
10008 target_percent_s[2] = '\0';
10010 target_percent_s_newline[0] = target_percent;
10011 target_percent_s_newline[1] = target_s;
10012 target_percent_s_newline[2] = target_newline;
10013 target_percent_s_newline[3] = '\0';
10015 init = true;
10017 return true;
10020 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10021 and no overflow/underflow occurred. INEXACT is true if M was not
10022 exactly calculated. TYPE is the tree type for the result. This
10023 function assumes that you cleared the MPFR flags and then
10024 calculated M to see if anything subsequently set a flag prior to
10025 entering this function. Return NULL_TREE if any checks fail. */
10027 static tree
10028 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10030 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10031 overflow/underflow occurred. If -frounding-math, proceed iff the
10032 result of calling FUNC was exact. */
10033 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10034 && (!flag_rounding_math || !inexact))
10036 REAL_VALUE_TYPE rr;
10038 real_from_mpfr (&rr, m, type, GMP_RNDN);
10039 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10040 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10041 but the mpft_t is not, then we underflowed in the
10042 conversion. */
10043 if (real_isfinite (&rr)
10044 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10046 REAL_VALUE_TYPE rmode;
10048 real_convert (&rmode, TYPE_MODE (type), &rr);
10049 /* Proceed iff the specified mode can hold the value. */
10050 if (real_identical (&rmode, &rr))
10051 return build_real (type, rmode);
10054 return NULL_TREE;
10057 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10058 number and no overflow/underflow occurred. INEXACT is true if M
10059 was not exactly calculated. TYPE is the tree type for the result.
10060 This function assumes that you cleared the MPFR flags and then
10061 calculated M to see if anything subsequently set a flag prior to
10062 entering this function. Return NULL_TREE if any checks fail, if
10063 FORCE_CONVERT is true, then bypass the checks. */
10065 static tree
10066 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10068 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10069 overflow/underflow occurred. If -frounding-math, proceed iff the
10070 result of calling FUNC was exact. */
10071 if (force_convert
10072 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10073 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10074 && (!flag_rounding_math || !inexact)))
10076 REAL_VALUE_TYPE re, im;
10078 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10079 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10080 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10081 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10082 but the mpft_t is not, then we underflowed in the
10083 conversion. */
10084 if (force_convert
10085 || (real_isfinite (&re) && real_isfinite (&im)
10086 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10087 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10089 REAL_VALUE_TYPE re_mode, im_mode;
10091 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10092 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10093 /* Proceed iff the specified mode can hold the value. */
10094 if (force_convert
10095 || (real_identical (&re_mode, &re)
10096 && real_identical (&im_mode, &im)))
10097 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10098 build_real (TREE_TYPE (type), im_mode));
10101 return NULL_TREE;
10104 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10105 the pointer *(ARG_QUO) and return the result. The type is taken
10106 from the type of ARG0 and is used for setting the precision of the
10107 calculation and results. */
10109 static tree
10110 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10112 tree const type = TREE_TYPE (arg0);
10113 tree result = NULL_TREE;
10115 STRIP_NOPS (arg0);
10116 STRIP_NOPS (arg1);
10118 /* To proceed, MPFR must exactly represent the target floating point
10119 format, which only happens when the target base equals two. */
10120 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10121 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10122 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10124 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10125 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10127 if (real_isfinite (ra0) && real_isfinite (ra1))
10129 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10130 const int prec = fmt->p;
10131 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10132 tree result_rem;
10133 long integer_quo;
10134 mpfr_t m0, m1;
10136 mpfr_inits2 (prec, m0, m1, NULL);
10137 mpfr_from_real (m0, ra0, GMP_RNDN);
10138 mpfr_from_real (m1, ra1, GMP_RNDN);
10139 mpfr_clear_flags ();
10140 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10141 /* Remquo is independent of the rounding mode, so pass
10142 inexact=0 to do_mpfr_ckconv(). */
10143 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10144 mpfr_clears (m0, m1, NULL);
10145 if (result_rem)
10147 /* MPFR calculates quo in the host's long so it may
10148 return more bits in quo than the target int can hold
10149 if sizeof(host long) > sizeof(target int). This can
10150 happen even for native compilers in LP64 mode. In
10151 these cases, modulo the quo value with the largest
10152 number that the target int can hold while leaving one
10153 bit for the sign. */
10154 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10155 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10157 /* Dereference the quo pointer argument. */
10158 arg_quo = build_fold_indirect_ref (arg_quo);
10159 /* Proceed iff a valid pointer type was passed in. */
10160 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10162 /* Set the value. */
10163 tree result_quo
10164 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10165 build_int_cst (TREE_TYPE (arg_quo),
10166 integer_quo));
10167 TREE_SIDE_EFFECTS (result_quo) = 1;
10168 /* Combine the quo assignment with the rem. */
10169 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10170 result_quo, result_rem));
10175 return result;
10178 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10179 resulting value as a tree with type TYPE. The mpfr precision is
10180 set to the precision of TYPE. We assume that this mpfr function
10181 returns zero if the result could be calculated exactly within the
10182 requested precision. In addition, the integer pointer represented
10183 by ARG_SG will be dereferenced and set to the appropriate signgam
10184 (-1,1) value. */
10186 static tree
10187 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10189 tree result = NULL_TREE;
10191 STRIP_NOPS (arg);
10193 /* To proceed, MPFR must exactly represent the target floating point
10194 format, which only happens when the target base equals two. Also
10195 verify ARG is a constant and that ARG_SG is an int pointer. */
10196 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10197 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10198 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10199 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10201 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10203 /* In addition to NaN and Inf, the argument cannot be zero or a
10204 negative integer. */
10205 if (real_isfinite (ra)
10206 && ra->cl != rvc_zero
10207 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10209 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10210 const int prec = fmt->p;
10211 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10212 int inexact, sg;
10213 mpfr_t m;
10214 tree result_lg;
10216 mpfr_init2 (m, prec);
10217 mpfr_from_real (m, ra, GMP_RNDN);
10218 mpfr_clear_flags ();
10219 inexact = mpfr_lgamma (m, &sg, m, rnd);
10220 result_lg = do_mpfr_ckconv (m, type, inexact);
10221 mpfr_clear (m);
10222 if (result_lg)
10224 tree result_sg;
10226 /* Dereference the arg_sg pointer argument. */
10227 arg_sg = build_fold_indirect_ref (arg_sg);
10228 /* Assign the signgam value into *arg_sg. */
10229 result_sg = fold_build2 (MODIFY_EXPR,
10230 TREE_TYPE (arg_sg), arg_sg,
10231 build_int_cst (TREE_TYPE (arg_sg), sg));
10232 TREE_SIDE_EFFECTS (result_sg) = 1;
10233 /* Combine the signgam assignment with the lgamma result. */
10234 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10235 result_sg, result_lg));
10240 return result;
10243 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10244 mpc function FUNC on it and return the resulting value as a tree
10245 with type TYPE. The mpfr precision is set to the precision of
10246 TYPE. We assume that function FUNC returns zero if the result
10247 could be calculated exactly within the requested precision. If
10248 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10249 in the arguments and/or results. */
10251 tree
10252 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10253 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10255 tree result = NULL_TREE;
10257 STRIP_NOPS (arg0);
10258 STRIP_NOPS (arg1);
10260 /* To proceed, MPFR must exactly represent the target floating point
10261 format, which only happens when the target base equals two. */
10262 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10264 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10265 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10266 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10268 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10269 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10270 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10271 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10273 if (do_nonfinite
10274 || (real_isfinite (re0) && real_isfinite (im0)
10275 && real_isfinite (re1) && real_isfinite (im1)))
10277 const struct real_format *const fmt =
10278 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10279 const int prec = fmt->p;
10280 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10281 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10282 int inexact;
10283 mpc_t m0, m1;
10285 mpc_init2 (m0, prec);
10286 mpc_init2 (m1, prec);
10287 mpfr_from_real (mpc_realref (m0), re0, rnd);
10288 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10289 mpfr_from_real (mpc_realref (m1), re1, rnd);
10290 mpfr_from_real (mpc_imagref (m1), im1, rnd);
10291 mpfr_clear_flags ();
10292 inexact = func (m0, m0, m1, crnd);
10293 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10294 mpc_clear (m0);
10295 mpc_clear (m1);
10299 return result;
10302 /* A wrapper function for builtin folding that prevents warnings for
10303 "statement without effect" and the like, caused by removing the
10304 call node earlier than the warning is generated. */
10306 tree
10307 fold_call_stmt (gcall *stmt, bool ignore)
10309 tree ret = NULL_TREE;
10310 tree fndecl = gimple_call_fndecl (stmt);
10311 location_t loc = gimple_location (stmt);
10312 if (fndecl
10313 && TREE_CODE (fndecl) == FUNCTION_DECL
10314 && DECL_BUILT_IN (fndecl)
10315 && !gimple_call_va_arg_pack_p (stmt))
10317 int nargs = gimple_call_num_args (stmt);
10318 tree *args = (nargs > 0
10319 ? gimple_call_arg_ptr (stmt, 0)
10320 : &error_mark_node);
10322 if (avoid_folding_inline_builtin (fndecl))
10323 return NULL_TREE;
10324 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10326 return targetm.fold_builtin (fndecl, nargs, args, ignore);
10328 else
10330 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
10331 if (ret)
10333 /* Propagate location information from original call to
10334 expansion of builtin. Otherwise things like
10335 maybe_emit_chk_warning, that operate on the expansion
10336 of a builtin, will use the wrong location information. */
10337 if (gimple_has_location (stmt))
10339 tree realret = ret;
10340 if (TREE_CODE (ret) == NOP_EXPR)
10341 realret = TREE_OPERAND (ret, 0);
10342 if (CAN_HAVE_LOCATION_P (realret)
10343 && !EXPR_HAS_LOCATION (realret))
10344 SET_EXPR_LOCATION (realret, loc);
10345 return realret;
10347 return ret;
10351 return NULL_TREE;
10354 /* Look up the function in builtin_decl that corresponds to DECL
10355 and set ASMSPEC as its user assembler name. DECL must be a
10356 function decl that declares a builtin. */
10358 void
10359 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10361 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10362 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10363 && asmspec != 0);
10365 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10366 set_user_assembler_name (builtin, asmspec);
10368 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10369 && INT_TYPE_SIZE < BITS_PER_WORD)
10371 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10372 set_user_assembler_libfunc ("ffs", asmspec);
10373 set_optab_libfunc (ffs_optab, mode, "ffs");
10377 /* Return true if DECL is a builtin that expands to a constant or similarly
10378 simple code. */
10379 bool
10380 is_simple_builtin (tree decl)
10382 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10383 switch (DECL_FUNCTION_CODE (decl))
10385 /* Builtins that expand to constants. */
10386 case BUILT_IN_CONSTANT_P:
10387 case BUILT_IN_EXPECT:
10388 case BUILT_IN_OBJECT_SIZE:
10389 case BUILT_IN_UNREACHABLE:
10390 /* Simple register moves or loads from stack. */
10391 case BUILT_IN_ASSUME_ALIGNED:
10392 case BUILT_IN_RETURN_ADDRESS:
10393 case BUILT_IN_EXTRACT_RETURN_ADDR:
10394 case BUILT_IN_FROB_RETURN_ADDR:
10395 case BUILT_IN_RETURN:
10396 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10397 case BUILT_IN_FRAME_ADDRESS:
10398 case BUILT_IN_VA_END:
10399 case BUILT_IN_STACK_SAVE:
10400 case BUILT_IN_STACK_RESTORE:
10401 /* Exception state returns or moves registers around. */
10402 case BUILT_IN_EH_FILTER:
10403 case BUILT_IN_EH_POINTER:
10404 case BUILT_IN_EH_COPY_VALUES:
10405 return true;
10407 default:
10408 return false;
10411 return false;
10414 /* Return true if DECL is a builtin that is not expensive, i.e., they are
10415 most probably expanded inline into reasonably simple code. This is a
10416 superset of is_simple_builtin. */
10417 bool
10418 is_inexpensive_builtin (tree decl)
10420 if (!decl)
10421 return false;
10422 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10423 return true;
10424 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10425 switch (DECL_FUNCTION_CODE (decl))
10427 case BUILT_IN_ABS:
10428 case BUILT_IN_ALLOCA:
10429 case BUILT_IN_ALLOCA_WITH_ALIGN:
10430 case BUILT_IN_BSWAP16:
10431 case BUILT_IN_BSWAP32:
10432 case BUILT_IN_BSWAP64:
10433 case BUILT_IN_CLZ:
10434 case BUILT_IN_CLZIMAX:
10435 case BUILT_IN_CLZL:
10436 case BUILT_IN_CLZLL:
10437 case BUILT_IN_CTZ:
10438 case BUILT_IN_CTZIMAX:
10439 case BUILT_IN_CTZL:
10440 case BUILT_IN_CTZLL:
10441 case BUILT_IN_FFS:
10442 case BUILT_IN_FFSIMAX:
10443 case BUILT_IN_FFSL:
10444 case BUILT_IN_FFSLL:
10445 case BUILT_IN_IMAXABS:
10446 case BUILT_IN_FINITE:
10447 case BUILT_IN_FINITEF:
10448 case BUILT_IN_FINITEL:
10449 case BUILT_IN_FINITED32:
10450 case BUILT_IN_FINITED64:
10451 case BUILT_IN_FINITED128:
10452 case BUILT_IN_FPCLASSIFY:
10453 case BUILT_IN_ISFINITE:
10454 case BUILT_IN_ISINF_SIGN:
10455 case BUILT_IN_ISINF:
10456 case BUILT_IN_ISINFF:
10457 case BUILT_IN_ISINFL:
10458 case BUILT_IN_ISINFD32:
10459 case BUILT_IN_ISINFD64:
10460 case BUILT_IN_ISINFD128:
10461 case BUILT_IN_ISNAN:
10462 case BUILT_IN_ISNANF:
10463 case BUILT_IN_ISNANL:
10464 case BUILT_IN_ISNAND32:
10465 case BUILT_IN_ISNAND64:
10466 case BUILT_IN_ISNAND128:
10467 case BUILT_IN_ISNORMAL:
10468 case BUILT_IN_ISGREATER:
10469 case BUILT_IN_ISGREATEREQUAL:
10470 case BUILT_IN_ISLESS:
10471 case BUILT_IN_ISLESSEQUAL:
10472 case BUILT_IN_ISLESSGREATER:
10473 case BUILT_IN_ISUNORDERED:
10474 case BUILT_IN_VA_ARG_PACK:
10475 case BUILT_IN_VA_ARG_PACK_LEN:
10476 case BUILT_IN_VA_COPY:
10477 case BUILT_IN_TRAP:
10478 case BUILT_IN_SAVEREGS:
10479 case BUILT_IN_POPCOUNTL:
10480 case BUILT_IN_POPCOUNTLL:
10481 case BUILT_IN_POPCOUNTIMAX:
10482 case BUILT_IN_POPCOUNT:
10483 case BUILT_IN_PARITYL:
10484 case BUILT_IN_PARITYLL:
10485 case BUILT_IN_PARITYIMAX:
10486 case BUILT_IN_PARITY:
10487 case BUILT_IN_LABS:
10488 case BUILT_IN_LLABS:
10489 case BUILT_IN_PREFETCH:
10490 case BUILT_IN_ACC_ON_DEVICE:
10491 return true;
10493 default:
10494 return is_simple_builtin (decl);
10497 return false;
10500 /* Return true if T is a constant and the value cast to a target char
10501 can be represented by a host char.
10502 Store the casted char constant in *P if so. */
10504 bool
10505 target_char_cst_p (tree t, char *p)
10507 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10508 return false;
10510 *p = (char)tree_to_uhwi (t);
10511 return true;