/* gcc/builtins.c  */

/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    {
	      align = BITS_PER_UNIT;
	      known_alignment = false;
	    }
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
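
/* Illustrative sketch (editorial note, not from the original source):
   the contract shared by the alignment routines above is that after

     get_pointer_alignment_1 (exp, &align, &bitpos);

   the relation (EXP - bitpos) % align == 0 holds with bitpos < align,
   both measured in bits.  For example, a pointer known to satisfy
   EXP % 16 == 4 in bytes yields align == 128 and bitpos == 32, and
   get_pointer_alignment then returns least_bit_hwi (32) == 32 bits,
   the largest power of two known to divide the address itself.  */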

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
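
/* Illustrative sketch (editorial note): string_length counts elements
   up to, but not including, the first NUL, capped at MAXELTS, e.g.

     string_length ("foo\0bar", 1, 7)   returns 3
     string_length ("foobar", 1, 4)     returns 4

   and for an ELTSIZE of 2 or 4 it compares ELTSIZE bytes at a time
   against "\0\0\0\0" to detect a wide-character NUL.  */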

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize - 1;

  HOST_WIDE_INT maxelts = strelts;
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      {
	maxelts = tree_to_uhwi (size);
	maxelts = maxelts / eltsize - 1;
      }

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);
      if (len < strelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff) : byteoff;
      offsave = fold_convert (ssizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      build_int_cst (ssizetype, len * eltsize));
      tree lenexp = size_diffop_loc (loc, ssize_int (strelts * eltsize), offsave);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
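
/* Illustrative sketch (editorial note): for a known constant offset the
   result folds to a compile-time constant, e.g. given

     const char a[8] = "foo";

   the length of the string starting at &a[1] folds to ssize_int (2).
   For an unknown offset OFF into a string with no embedded NULs, the
   result is the guarded expression built above, essentially
   OFF <= strelts * eltsize ? strelts * eltsize - OFF : 0, and
   NULL_TREE is returned whenever no length can be computed.  */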

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
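
/* Illustrative sketch (editorial note): on a little-endian target,
   c_readstr ("abcd", SImode) yields the SImode constant 0x64636261;
   byte I of the string lands at the bit position J computed above, and
   once the terminating NUL has been read CH stays zero, so any bytes
   past the end of the string are zero-filled.  */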

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
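
/* Illustrative sketch (editorial note): for an 8-bit target char, an
   INTEGER_CST of 65 makes target_char_cast store 65 in *P ('A' on an
   ASCII host) and return 0, while a non-INTEGER_CST or a value that
   does not survive the round trip through the host char makes it
   return 1 to signal failure.  */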

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
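
/* Illustrative sketch (editorial note): this expander implements both

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   with COUNT the constant argument.  Each level beyond the current
   frame follows one link of the dynamic chain via
   DYNAMIC_CHAIN_ADDRESS, and any nonzero COUNT forces use of the hard
   frame pointer and disables its elimination, as noted above.  */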

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}
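
/* Illustrative sketch (editorial note): the buffer laid out above is,
   in Pmode-word units,

     buf[0]        frame value (targetm.builtin_setjmp_frame_value)
     buf[1]        address of RECEIVER_LABEL
     buf[2] ...    machine-dependent stack save area (sa_mode)

   which is why the comments on __builtin_longjmp and
   __builtin_update_setjmp_buf below speak of an array of five words.  */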

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
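
/* Illustrative sketch (editorial note): the intended internal usage
   pattern of the pair is

     static void *buf[5];
     ...
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   where the second argument must be the literal 1, matching the
   gcc_assert (value == const1_rtx) above; jumping back to a setjmp
   site in the same function is explicitly unsupported.  */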

/* Return true if more CALL_EXPR arguments remain to be visited in ITER.  */

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
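
/* Illustrative sketch (editorial note): a typical call from the
   expanders below is

     if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
       return NULL_RTX;

   which accepts exactly two pointer arguments; a trailing 0 in place
   of VOID_TYPE would allow arbitrary further arguments, and each
   POINTER_TYPE slot is additionally checked against attribute nonnull
   via ARGMAP.  */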

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
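
/* Illustrative sketch (editorial note): the checks above mirror the
   documented call forms

     __builtin_prefetch (&a[i]);         rw = 0, locality = 3
     __builtin_prefetch (&a[i], 1, 0);   write, no temporal locality

   where the optional second and third arguments must be compile-time
   constants in {0, 1} and {0, 1, 2, 3} respectively.  */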

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1598 /* Perform an untyped call and save the state required to perform an
1599 untyped return of whatever value was returned by the given function. */
1601 static rtx
1602 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1604 int size, align, regno;
1605 fixed_size_mode mode;
1606 rtx incoming_args, result, reg, dest, src;
1607 rtx_call_insn *call_insn;
1608 rtx old_stack_level = 0;
1609 rtx call_fusage = 0;
1610 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1612 arguments = convert_memory_address (Pmode, arguments);
1614 /* Create a block where the return registers can be saved. */
1615 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1617 /* Fetch the arg pointer from the ARGUMENTS block. */
1618 incoming_args = gen_reg_rtx (Pmode);
1619 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1620 if (!STACK_GROWS_DOWNWARD)
1621 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1622 incoming_args, 0, OPTAB_LIB_WIDEN);
1624 /* Push a new argument block and copy the arguments. Do not allow
1625 the (potential) memcpy call below to interfere with our stack
1626 manipulations. */
1627 do_pending_stack_adjust ();
1628 NO_DEFER_POP;
1630 /* Save the stack with nonlocal if available. */
1631 if (targetm.have_save_stack_nonlocal ())
1632 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1633 else
1634 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1636 /* Allocate a block of memory onto the stack and copy the memory
1637 arguments to the outgoing arguments address. We can pass TRUE
1638 as the 4th argument because we just saved the stack pointer
1639 and will restore it right after the call. */
1640 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1642 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1643 may have already set current_function_calls_alloca to true.
1644 current_function_calls_alloca won't be set if argsize is zero,
1645 so we have to guarantee need_drap is true here. */
1646 if (SUPPORTS_STACK_ALIGNMENT)
1647 crtl->need_drap = true;
1649 dest = virtual_outgoing_args_rtx;
1650 if (!STACK_GROWS_DOWNWARD)
1652 if (CONST_INT_P (argsize))
1653 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1654 else
1655 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1657 dest = gen_rtx_MEM (BLKmode, dest);
1658 set_mem_align (dest, PARM_BOUNDARY);
1659 src = gen_rtx_MEM (BLKmode, incoming_args);
1660 set_mem_align (src, PARM_BOUNDARY);
1661 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1663 /* Refer to the argument block. */
1664 apply_args_size ();
1665 arguments = gen_rtx_MEM (BLKmode, arguments);
1666 set_mem_align (arguments, PARM_BOUNDARY);
1668 /* Walk past the arg-pointer and structure value address. */
1669 size = GET_MODE_SIZE (Pmode);
1670 if (struct_value)
1671 size += GET_MODE_SIZE (Pmode);
1673 /* Restore each of the registers previously saved. Make USE insns
1674 for each of these registers for use in making the call. */
1675 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1676 if ((mode = apply_args_mode[regno]) != VOIDmode)
1678 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1679 if (size % align != 0)
1680 size = CEIL (size, align) * align;
1681 reg = gen_rtx_REG (mode, regno);
1682 emit_move_insn (reg, adjust_address (arguments, mode, size));
1683 use_reg (&call_fusage, reg);
1684 size += GET_MODE_SIZE (mode);
1687 /* Restore the structure value address unless this is passed as an
1688 "invisible" first argument. */
1689 size = GET_MODE_SIZE (Pmode);
1690 if (struct_value)
1692 rtx value = gen_reg_rtx (Pmode);
1693 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1694 emit_move_insn (struct_value, value);
1695 if (REG_P (struct_value))
1696 use_reg (&call_fusage, struct_value);
1697 size += GET_MODE_SIZE (Pmode);
1700 /* All arguments and registers used for the call are set up by now! */
1701 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1703 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1704 conversion is needed, and we don't want to load it into a register as
1705 an optimization, because prepare_call_address already did that if needed. */
1706 if (GET_CODE (function) != SYMBOL_REF)
1707 function = memory_address (FUNCTION_MODE, function);
1709 /* Generate the actual call instruction and save the return value. */
1710 if (targetm.have_untyped_call ())
1712 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1713 emit_call_insn (targetm.gen_untyped_call (mem, result,
1714 result_vector (1, result)));
1716 else if (targetm.have_call_value ())
1718 rtx valreg = 0;
1720 /* Locate the unique return register. It is not possible to
1721 express a call that sets more than one return register using
1722 call_value; use untyped_call for that. In fact, untyped_call
1723 only needs to save the return registers in the given block. */
1724 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1725 if ((mode = apply_result_mode[regno]) != VOIDmode)
1727 gcc_assert (!valreg); /* have_untyped_call required. */
1729 valreg = gen_rtx_REG (mode, regno);
1732 emit_insn (targetm.gen_call_value (valreg,
1733 gen_rtx_MEM (FUNCTION_MODE, function),
1734 const0_rtx, NULL_RTX, const0_rtx));
1736 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1738 else
1739 gcc_unreachable ();
1741 /* Find the CALL insn we just emitted, and attach the register usage
1742 information. */
1743 call_insn = last_call_insn ();
1744 add_function_usage_to (call_insn, call_fusage);
1746 /* Restore the stack. */
1747 if (targetm.have_save_stack_nonlocal ())
1748 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1749 else
1750 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1751 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1753 OK_DEFER_POP;
1755 /* Return the address of the result block. */
1756 result = copy_addr_to_reg (XEXP (result, 0));
1757 return convert_memory_address (ptr_mode, result);
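/* Together with __builtin_apply_args and __builtin_return, this
   implements the GNU untyped-call extension. A rough usage sketch:

     void *args = __builtin_apply_args ();
     void *res = __builtin_apply ((void (*) ()) fn, args, 64);
     __builtin_return (res);

   which forwards the current function's incoming arguments to FN and
   then returns whatever FN returned; FN and the 64-byte bound on the
   argument block size are illustrative only. */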
1760 /* Perform an untyped return. */
1762 static void
1763 expand_builtin_return (rtx result)
1765 int size, align, regno;
1766 fixed_size_mode mode;
1767 rtx reg;
1768 rtx_insn *call_fusage = 0;
1770 result = convert_memory_address (Pmode, result);
1772 apply_result_size ();
1773 result = gen_rtx_MEM (BLKmode, result);
1775 if (targetm.have_untyped_return ())
1777 rtx vector = result_vector (0, result);
1778 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1779 emit_barrier ();
1780 return;
1783 /* Restore the return value and note that each value is used. */
1784 size = 0;
1785 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1786 if ((mode = apply_result_mode[regno]) != VOIDmode)
1788 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1789 if (size % align != 0)
1790 size = CEIL (size, align) * align;
1791 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1792 emit_move_insn (reg, adjust_address (result, mode, size));
1794 push_to_sequence (call_fusage);
1795 emit_use (reg);
1796 call_fusage = get_insns ();
1797 end_sequence ();
1798 size += GET_MODE_SIZE (mode);
1801 /* Put the USE insns before the return. */
1802 emit_insn (call_fusage);
1804 /* Return whatever values were restored by jumping directly to the end
1805 of the function. */
1806 expand_naked_return ();
1809 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1811 static enum type_class
1812 type_to_class (tree type)
1814 switch (TREE_CODE (type))
1816 case VOID_TYPE: return void_type_class;
1817 case INTEGER_TYPE: return integer_type_class;
1818 case ENUMERAL_TYPE: return enumeral_type_class;
1819 case BOOLEAN_TYPE: return boolean_type_class;
1820 case POINTER_TYPE: return pointer_type_class;
1821 case REFERENCE_TYPE: return reference_type_class;
1822 case OFFSET_TYPE: return offset_type_class;
1823 case REAL_TYPE: return real_type_class;
1824 case COMPLEX_TYPE: return complex_type_class;
1825 case FUNCTION_TYPE: return function_type_class;
1826 case METHOD_TYPE: return method_type_class;
1827 case RECORD_TYPE: return record_type_class;
1828 case UNION_TYPE:
1829 case QUAL_UNION_TYPE: return union_type_class;
1830 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1831 ? string_type_class : array_type_class);
1832 case LANG_TYPE: return lang_type_class;
1833 default: return no_type_class;
1837 /* Expand a call EXP to __builtin_classify_type. */
1839 static rtx
1840 expand_builtin_classify_type (tree exp)
1842 if (call_expr_nargs (exp))
1843 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1844 return GEN_INT (no_type_class);
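/* For example, __builtin_classify_type (3.14) evaluates to
   real_type_class, while a call with no argument yields
   no_type_class. */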
1847 /* This helper macro, meant to be used in mathfn_built_in below, determines
1848 which among a set of builtin math functions is appropriate for a given type
1849 mode. The `F' (float) and `L' (long double) are automatically generated
1850 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1851 types, there are additional types that are considered with 'F32', 'F64',
1852 'F128', etc. suffixes. */
1853 #define CASE_MATHFN(MATHFN) \
1854 CASE_CFN_##MATHFN: \
1855 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1856 fcodel = BUILT_IN_##MATHFN##L ; break;
1857 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1858 types. */
1859 #define CASE_MATHFN_FLOATN(MATHFN) \
1860 CASE_CFN_##MATHFN: \
1861 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1862 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1863 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1864 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1865 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1866 break;
1867 /* Similar to above, but appends _R after any F/L suffix. */
1868 #define CASE_MATHFN_REENT(MATHFN) \
1869 case CFN_BUILT_IN_##MATHFN##_R: \
1870 case CFN_BUILT_IN_##MATHFN##F_R: \
1871 case CFN_BUILT_IN_##MATHFN##L_R: \
1872 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1873 fcodel = BUILT_IN_##MATHFN##L_R ; break;
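/* As an illustration (the exact expansion depends on the generated
   case-cfn-macros.h), CASE_MATHFN (ACOS) produces roughly:

     case CFN_ACOS:
     case CFN_BUILT_IN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;

   leaving the _Float<N> codes at their END_BUILTINS defaults. */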
1875 /* Return a function equivalent to FN but operating on floating-point
1876 values of type TYPE, or END_BUILTINS if no such function exists.
1877 This is purely an operation on function codes; it does not guarantee
1878 that the target actually has an implementation of the function. */
1880 static built_in_function
1881 mathfn_built_in_2 (tree type, combined_fn fn)
1883 tree mtype;
1884 built_in_function fcode, fcodef, fcodel;
1885 built_in_function fcodef16 = END_BUILTINS;
1886 built_in_function fcodef32 = END_BUILTINS;
1887 built_in_function fcodef64 = END_BUILTINS;
1888 built_in_function fcodef128 = END_BUILTINS;
1889 built_in_function fcodef32x = END_BUILTINS;
1890 built_in_function fcodef64x = END_BUILTINS;
1891 built_in_function fcodef128x = END_BUILTINS;
1893 switch (fn)
1895 CASE_MATHFN (ACOS)
1896 CASE_MATHFN (ACOSH)
1897 CASE_MATHFN (ASIN)
1898 CASE_MATHFN (ASINH)
1899 CASE_MATHFN (ATAN)
1900 CASE_MATHFN (ATAN2)
1901 CASE_MATHFN (ATANH)
1902 CASE_MATHFN (CBRT)
1903 CASE_MATHFN_FLOATN (CEIL)
1904 CASE_MATHFN (CEXPI)
1905 CASE_MATHFN_FLOATN (COPYSIGN)
1906 CASE_MATHFN (COS)
1907 CASE_MATHFN (COSH)
1908 CASE_MATHFN (DREM)
1909 CASE_MATHFN (ERF)
1910 CASE_MATHFN (ERFC)
1911 CASE_MATHFN (EXP)
1912 CASE_MATHFN (EXP10)
1913 CASE_MATHFN (EXP2)
1914 CASE_MATHFN (EXPM1)
1915 CASE_MATHFN (FABS)
1916 CASE_MATHFN (FDIM)
1917 CASE_MATHFN_FLOATN (FLOOR)
1918 CASE_MATHFN_FLOATN (FMA)
1919 CASE_MATHFN_FLOATN (FMAX)
1920 CASE_MATHFN_FLOATN (FMIN)
1921 CASE_MATHFN (FMOD)
1922 CASE_MATHFN (FREXP)
1923 CASE_MATHFN (GAMMA)
1924 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1925 CASE_MATHFN (HUGE_VAL)
1926 CASE_MATHFN (HYPOT)
1927 CASE_MATHFN (ILOGB)
1928 CASE_MATHFN (ICEIL)
1929 CASE_MATHFN (IFLOOR)
1930 CASE_MATHFN (INF)
1931 CASE_MATHFN (IRINT)
1932 CASE_MATHFN (IROUND)
1933 CASE_MATHFN (ISINF)
1934 CASE_MATHFN (J0)
1935 CASE_MATHFN (J1)
1936 CASE_MATHFN (JN)
1937 CASE_MATHFN (LCEIL)
1938 CASE_MATHFN (LDEXP)
1939 CASE_MATHFN (LFLOOR)
1940 CASE_MATHFN (LGAMMA)
1941 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1942 CASE_MATHFN (LLCEIL)
1943 CASE_MATHFN (LLFLOOR)
1944 CASE_MATHFN (LLRINT)
1945 CASE_MATHFN (LLROUND)
1946 CASE_MATHFN (LOG)
1947 CASE_MATHFN (LOG10)
1948 CASE_MATHFN (LOG1P)
1949 CASE_MATHFN (LOG2)
1950 CASE_MATHFN (LOGB)
1951 CASE_MATHFN (LRINT)
1952 CASE_MATHFN (LROUND)
1953 CASE_MATHFN (MODF)
1954 CASE_MATHFN (NAN)
1955 CASE_MATHFN (NANS)
1956 CASE_MATHFN_FLOATN (NEARBYINT)
1957 CASE_MATHFN (NEXTAFTER)
1958 CASE_MATHFN (NEXTTOWARD)
1959 CASE_MATHFN (POW)
1960 CASE_MATHFN (POWI)
1961 CASE_MATHFN (POW10)
1962 CASE_MATHFN (REMAINDER)
1963 CASE_MATHFN (REMQUO)
1964 CASE_MATHFN_FLOATN (RINT)
1965 CASE_MATHFN_FLOATN (ROUND)
1966 CASE_MATHFN (SCALB)
1967 CASE_MATHFN (SCALBLN)
1968 CASE_MATHFN (SCALBN)
1969 CASE_MATHFN (SIGNBIT)
1970 CASE_MATHFN (SIGNIFICAND)
1971 CASE_MATHFN (SIN)
1972 CASE_MATHFN (SINCOS)
1973 CASE_MATHFN (SINH)
1974 CASE_MATHFN_FLOATN (SQRT)
1975 CASE_MATHFN (TAN)
1976 CASE_MATHFN (TANH)
1977 CASE_MATHFN (TGAMMA)
1978 CASE_MATHFN_FLOATN (TRUNC)
1979 CASE_MATHFN (Y0)
1980 CASE_MATHFN (Y1)
1981 CASE_MATHFN (YN)
1983 default:
1984 return END_BUILTINS;
1987 mtype = TYPE_MAIN_VARIANT (type);
1988 if (mtype == double_type_node)
1989 return fcode;
1990 else if (mtype == float_type_node)
1991 return fcodef;
1992 else if (mtype == long_double_type_node)
1993 return fcodel;
1994 else if (mtype == float16_type_node)
1995 return fcodef16;
1996 else if (mtype == float32_type_node)
1997 return fcodef32;
1998 else if (mtype == float64_type_node)
1999 return fcodef64;
2000 else if (mtype == float128_type_node)
2001 return fcodef128;
2002 else if (mtype == float32x_type_node)
2003 return fcodef32x;
2004 else if (mtype == float64x_type_node)
2005 return fcodef64x;
2006 else if (mtype == float128x_type_node)
2007 return fcodef128x;
2008 else
2009 return END_BUILTINS;
2012 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2013 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2014 otherwise use the explicit declaration. If we can't do the conversion,
2015 return null. */
2017 static tree
2018 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2020 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2021 if (fcode2 == END_BUILTINS)
2022 return NULL_TREE;
2024 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2025 return NULL_TREE;
2027 return builtin_decl_explicit (fcode2);
2030 /* Like mathfn_built_in_1, but always use the implicit array. */
2032 tree
2033 mathfn_built_in (tree type, combined_fn fn)
2035 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2038 /* Like mathfn_built_in_1, but take a built_in_function and
2039 always use the implicit array. */
2041 tree
2042 mathfn_built_in (tree type, enum built_in_function fn)
2044 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
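/* For instance, mathfn_built_in (float_type_node, BUILT_IN_SQRT)
   maps BUILT_IN_SQRT to BUILT_IN_SQRTF and returns the sqrtf decl,
   or NULL_TREE if sqrtf may not be used implicitly (sketch):

     if (tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT))
       ... build a call to FN instead of the double variant ...  */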
2047 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2048 return its code, otherwise return IFN_LAST. Note that this function
2049 only tests whether the function is defined in internal-fn.def, not whether
2050 it is actually available on the target. */
2052 internal_fn
2053 associated_internal_fn (tree fndecl)
2055 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2056 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2057 switch (DECL_FUNCTION_CODE (fndecl))
2059 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2060 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2061 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2062 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2063 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2064 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2065 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2066 #include "internal-fn.def"
2068 CASE_FLT_FN (BUILT_IN_POW10):
2069 return IFN_EXP10;
2071 CASE_FLT_FN (BUILT_IN_DREM):
2072 return IFN_REMAINDER;
2074 CASE_FLT_FN (BUILT_IN_SCALBN):
2075 CASE_FLT_FN (BUILT_IN_SCALBLN):
2076 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2077 return IFN_LDEXP;
2078 return IFN_LAST;
2080 default:
2081 return IFN_LAST;
2085 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2086 on the current target by a call to an internal function, return the
2087 code of that internal function, otherwise return IFN_LAST. The caller
2088 is responsible for ensuring that any side-effects of the built-in
2089 call are dealt with correctly. E.g. if CALL sets errno, the caller
2090 must decide that the errno result isn't needed or make it available
2091 in some other way. */
2093 internal_fn
2094 replacement_internal_fn (gcall *call)
2096 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2098 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2099 if (ifn != IFN_LAST)
2101 tree_pair types = direct_internal_fn_types (ifn, call);
2102 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2103 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2104 return ifn;
2107 return IFN_LAST;
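/* A typical use from a pass that has already dealt with errno
   (sketch):

     internal_fn ifn = replacement_internal_fn (call);
     if (ifn != IFN_LAST)
       ... replace CALL with an internal call to IFN ...

   e.g. a call to sqrtf becomes IFN_SQRT when the target implements
   the sqrt optab in SFmode. */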
2110 /* Expand a call to the builtin ternary math functions (fma).
2111 Return NULL_RTX if a normal call should be emitted rather than expanding the
2112 function in-line. EXP is the expression that is a call to the builtin
2113 function; if convenient, the result should be placed in TARGET.
2114 SUBTARGET may be used as the target for computing one of EXP's
2115 operands. */
2117 static rtx
2118 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2120 optab builtin_optab;
2121 rtx op0, op1, op2, result;
2122 rtx_insn *insns;
2123 tree fndecl = get_callee_fndecl (exp);
2124 tree arg0, arg1, arg2;
2125 machine_mode mode;
2127 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2128 return NULL_RTX;
2130 arg0 = CALL_EXPR_ARG (exp, 0);
2131 arg1 = CALL_EXPR_ARG (exp, 1);
2132 arg2 = CALL_EXPR_ARG (exp, 2);
2134 switch (DECL_FUNCTION_CODE (fndecl))
2136 CASE_FLT_FN (BUILT_IN_FMA):
2137 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2138 builtin_optab = fma_optab; break;
2139 default:
2140 gcc_unreachable ();
2143 /* Make a suitable register to place result in. */
2144 mode = TYPE_MODE (TREE_TYPE (exp));
2146 /* Before working hard, check whether the instruction is available. */
2147 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2148 return NULL_RTX;
2150 result = gen_reg_rtx (mode);
2152 /* Always stabilize the argument list. */
2153 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2154 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2155 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2157 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2158 op1 = expand_normal (arg1);
2159 op2 = expand_normal (arg2);
2161 start_sequence ();
2163 /* Compute into RESULT.
2164 Set RESULT to wherever the result comes back. */
2165 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2166 result, 0);
2168 /* If we were unable to expand via the builtin, stop the sequence
2169 (without outputting the insns) and call to the library function
2170 with the stabilized argument list. */
2171 if (result == 0)
2173 end_sequence ();
2174 return expand_call (exp, target, target == const0_rtx);
2177 /* Output the entire sequence. */
2178 insns = get_insns ();
2179 end_sequence ();
2180 emit_insn (insns);
2182 return result;
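/* E.g. on a target providing an fmasf4 pattern, __builtin_fmaf (a, b, c)
   expands to that single fused insn; otherwise the sequence above is
   abandoned and an ordinary call to fmaf is emitted. */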
2185 /* Expand a call to the builtin sin and cos math functions.
2186 Return NULL_RTX if a normal call should be emitted rather than expanding the
2187 function in-line. EXP is the expression that is a call to the builtin
2188 function; if convenient, the result should be placed in TARGET.
2189 SUBTARGET may be used as the target for computing one of EXP's
2190 operands. */
2192 static rtx
2193 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2195 optab builtin_optab;
2196 rtx op0;
2197 rtx_insn *insns;
2198 tree fndecl = get_callee_fndecl (exp);
2199 machine_mode mode;
2200 tree arg;
2202 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2203 return NULL_RTX;
2205 arg = CALL_EXPR_ARG (exp, 0);
2207 switch (DECL_FUNCTION_CODE (fndecl))
2209 CASE_FLT_FN (BUILT_IN_SIN):
2210 CASE_FLT_FN (BUILT_IN_COS):
2211 builtin_optab = sincos_optab; break;
2212 default:
2213 gcc_unreachable ();
2216 /* Make a suitable register to place result in. */
2217 mode = TYPE_MODE (TREE_TYPE (exp));
2219 /* Check if the sincos insn is available; otherwise fall back
2220 to the sin or cos insn. */
2221 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2222 switch (DECL_FUNCTION_CODE (fndecl))
2224 CASE_FLT_FN (BUILT_IN_SIN):
2225 builtin_optab = sin_optab; break;
2226 CASE_FLT_FN (BUILT_IN_COS):
2227 builtin_optab = cos_optab; break;
2228 default:
2229 gcc_unreachable ();
2232 /* Before working hard, check whether the instruction is available. */
2233 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2235 rtx result = gen_reg_rtx (mode);
2237 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2238 need to expand the argument again. This way, we will not perform
2239 side effects more than once. */
2240 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2242 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2244 start_sequence ();
2246 /* Compute into RESULT.
2247 Set RESULT to wherever the result comes back. */
2248 if (builtin_optab == sincos_optab)
2250 int ok;
2252 switch (DECL_FUNCTION_CODE (fndecl))
2254 CASE_FLT_FN (BUILT_IN_SIN):
2255 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2256 break;
2257 CASE_FLT_FN (BUILT_IN_COS):
2258 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2259 break;
2260 default:
2261 gcc_unreachable ();
2263 gcc_assert (ok);
2265 else
2266 result = expand_unop (mode, builtin_optab, op0, result, 0);
2268 if (result != 0)
2270 /* Output the entire sequence. */
2271 insns = get_insns ();
2272 end_sequence ();
2273 emit_insn (insns);
2274 return result;
2277 /* If we were unable to expand via the builtin, stop the sequence
2278 (without outputting the insns) and call to the library function
2279 with the stabilized argument list. */
2280 end_sequence ();
2283 return expand_call (exp, target, target == const0_rtx);
2286 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2287 return an RTL instruction code that implements the functionality.
2288 If that isn't possible or available return CODE_FOR_nothing. */
2290 static enum insn_code
2291 interclass_mathfn_icode (tree arg, tree fndecl)
2293 bool errno_set = false;
2294 optab builtin_optab = unknown_optab;
2295 machine_mode mode;
2297 switch (DECL_FUNCTION_CODE (fndecl))
2299 CASE_FLT_FN (BUILT_IN_ILOGB):
2300 errno_set = true; builtin_optab = ilogb_optab; break;
2301 CASE_FLT_FN (BUILT_IN_ISINF):
2302 builtin_optab = isinf_optab; break;
2303 case BUILT_IN_ISNORMAL:
2304 case BUILT_IN_ISFINITE:
2305 CASE_FLT_FN (BUILT_IN_FINITE):
2306 case BUILT_IN_FINITED32:
2307 case BUILT_IN_FINITED64:
2308 case BUILT_IN_FINITED128:
2309 case BUILT_IN_ISINFD32:
2310 case BUILT_IN_ISINFD64:
2311 case BUILT_IN_ISINFD128:
2312 /* These builtins have no optabs (yet). */
2313 break;
2314 default:
2315 gcc_unreachable ();
2318 /* There's no easy way to detect the case we need to set EDOM. */
2319 if (flag_errno_math && errno_set)
2320 return CODE_FOR_nothing;
2322 /* Optab mode depends on the mode of the input argument. */
2323 mode = TYPE_MODE (TREE_TYPE (arg));
2325 if (builtin_optab)
2326 return optab_handler (builtin_optab, mode);
2327 return CODE_FOR_nothing;
2330 /* Expand a call to one of the builtin math functions that operate on
2331 a floating-point argument and produce an integer result (ilogb, isinf,
2332 isnan, etc.).
2333 Return 0 if a normal call should be emitted rather than expanding the
2334 function in-line. EXP is the expression that is a call to the builtin
2335 function; if convenient, the result should be placed in TARGET. */
2337 static rtx
2338 expand_builtin_interclass_mathfn (tree exp, rtx target)
2340 enum insn_code icode = CODE_FOR_nothing;
2341 rtx op0;
2342 tree fndecl = get_callee_fndecl (exp);
2343 machine_mode mode;
2344 tree arg;
2346 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2347 return NULL_RTX;
2349 arg = CALL_EXPR_ARG (exp, 0);
2350 icode = interclass_mathfn_icode (arg, fndecl);
2351 mode = TYPE_MODE (TREE_TYPE (arg));
2353 if (icode != CODE_FOR_nothing)
2355 struct expand_operand ops[1];
2356 rtx_insn *last = get_last_insn ();
2357 tree orig_arg = arg;
2359 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2360 need to expand the argument again. This way, we will not perform
2361 side effects more than once. */
2362 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2364 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2366 if (mode != GET_MODE (op0))
2367 op0 = convert_to_mode (mode, op0, 0);
2369 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2370 if (maybe_legitimize_operands (icode, 0, 1, ops)
2371 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2372 return ops[0].value;
2374 delete_insns_since (last);
2375 CALL_EXPR_ARG (exp, 0) = orig_arg;
2378 return NULL_RTX;
2381 /* Expand a call to the builtin sincos math function.
2382 Return NULL_RTX if a normal call should be emitted rather than expanding the
2383 function in-line. EXP is the expression that is a call to the builtin
2384 function. */
2386 static rtx
2387 expand_builtin_sincos (tree exp)
2389 rtx op0, op1, op2, target1, target2;
2390 machine_mode mode;
2391 tree arg, sinp, cosp;
2392 int result;
2393 location_t loc = EXPR_LOCATION (exp);
2394 tree alias_type, alias_off;
2396 if (!validate_arglist (exp, REAL_TYPE,
2397 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2398 return NULL_RTX;
2400 arg = CALL_EXPR_ARG (exp, 0);
2401 sinp = CALL_EXPR_ARG (exp, 1);
2402 cosp = CALL_EXPR_ARG (exp, 2);
2404 /* Make a suitable register to place result in. */
2405 mode = TYPE_MODE (TREE_TYPE (arg));
2407 /* Check if sincos insn is available, otherwise emit the call. */
2408 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2409 return NULL_RTX;
2411 target1 = gen_reg_rtx (mode);
2412 target2 = gen_reg_rtx (mode);
2414 op0 = expand_normal (arg);
2415 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2416 alias_off = build_int_cst (alias_type, 0);
2417 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2418 sinp, alias_off));
2419 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2420 cosp, alias_off));
2422 /* Compute into TARGET1 and TARGET2.
2423 Set them to wherever the results come back. */
2424 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2425 gcc_assert (result);
2427 /* Move target1 and target2 to the memory locations indicated
2428 by op1 and op2. */
2429 emit_move_insn (op1, target1);
2430 emit_move_insn (op2, target2);
2432 return const0_rtx;
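/* In effect, on a target with a sincossf3 pattern,

     sincosf (x, &s, &c);

   expands to one insn computing both values, whose two outputs are
   then stored through the user-supplied pointers (sketch; the pattern
   name depends on the mode). */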
2435 /* Expand a call to the internal cexpi builtin to the sincos math function.
2436 EXP is the expression that is a call to the builtin function; if convenient,
2437 the result should be placed in TARGET. */
2439 static rtx
2440 expand_builtin_cexpi (tree exp, rtx target)
2442 tree fndecl = get_callee_fndecl (exp);
2443 tree arg, type;
2444 machine_mode mode;
2445 rtx op0, op1, op2;
2446 location_t loc = EXPR_LOCATION (exp);
2448 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2449 return NULL_RTX;
2451 arg = CALL_EXPR_ARG (exp, 0);
2452 type = TREE_TYPE (arg);
2453 mode = TYPE_MODE (TREE_TYPE (arg));
2455 /* Try expanding via a sincos optab, falling back to emitting a libcall
2456 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2457 is only generated from sincos or cexp, or when we have either of them. */
2458 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2460 op1 = gen_reg_rtx (mode);
2461 op2 = gen_reg_rtx (mode);
2463 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2465 /* Compute into op1 and op2. */
2466 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2468 else if (targetm.libc_has_function (function_sincos))
2470 tree call, fn = NULL_TREE;
2471 tree top1, top2;
2472 rtx op1a, op2a;
2474 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2475 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2476 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2477 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2478 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2479 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2480 else
2481 gcc_unreachable ();
2483 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2484 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2485 op1a = copy_addr_to_reg (XEXP (op1, 0));
2486 op2a = copy_addr_to_reg (XEXP (op2, 0));
2487 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2488 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2490 /* Make sure not to fold the sincos call again. */
2491 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2492 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2493 call, 3, arg, top1, top2));
2495 else
2497 tree call, fn = NULL_TREE, narg;
2498 tree ctype = build_complex_type (type);
2500 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2501 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2502 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2503 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2504 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2505 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2506 else
2507 gcc_unreachable ();
2509 /* If we don't have a decl for cexp, create one. This is the
2510 friendliest fallback if the user calls __builtin_cexpi
2511 on a target without full C99 function support. */
2512 if (fn == NULL_TREE)
2514 tree fntype;
2515 const char *name = NULL;
2517 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2518 name = "cexpf";
2519 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2520 name = "cexp";
2521 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2522 name = "cexpl";
2524 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2525 fn = build_fn_decl (name, fntype);
2528 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2529 build_real (type, dconst0), arg);
2531 /* Make sure not to fold the cexp call again. */
2532 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2533 return expand_expr (build_call_nary (ctype, call, 1, narg),
2534 target, VOIDmode, EXPAND_NORMAL);
2537 /* Now build the proper return type. */
2538 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2539 make_tree (TREE_TYPE (arg), op2),
2540 make_tree (TREE_TYPE (arg), op1)),
2541 target, VOIDmode, EXPAND_NORMAL);
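/* In short, __builtin_cexpif (x) is lowered to a sincos insn when one
   exists, else to a call "sincosf (x, &s, &c)", else to a call
   "cexpf (0.0f + x*i)"; in each case the result represents
   cosf (x) + sinf (x)*i. */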
2544 /* Conveniently construct a function call expression. FNDECL names the
2545 function to be called, N is the number of arguments, and the "..."
2546 parameters are the argument expressions. Unlike build_call_expr,
2547 this doesn't fold the call, so it always returns a CALL_EXPR. */
2549 static tree
2550 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2552 va_list ap;
2553 tree fntype = TREE_TYPE (fndecl);
2554 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2556 va_start (ap, n);
2557 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2558 va_end (ap);
2559 SET_EXPR_LOCATION (fn, loc);
2560 return fn;
2563 /* Expand a call to one of the builtin rounding functions gcc defines
2564 as an extension (lfloor and lceil). As these are gcc extensions we
2565 do not need to worry about setting errno to EDOM.
2566 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2567 EXP is the expression that is a call to the builtin function;
2568 if convenient, the result should be placed in TARGET. */
2570 static rtx
2571 expand_builtin_int_roundingfn (tree exp, rtx target)
2573 convert_optab builtin_optab;
2574 rtx op0, tmp;
2575 rtx_insn *insns;
2576 tree fndecl = get_callee_fndecl (exp);
2577 enum built_in_function fallback_fn;
2578 tree fallback_fndecl;
2579 machine_mode mode;
2580 tree arg;
2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 gcc_unreachable ();
2585 arg = CALL_EXPR_ARG (exp, 0);
2587 switch (DECL_FUNCTION_CODE (fndecl))
2589 CASE_FLT_FN (BUILT_IN_ICEIL):
2590 CASE_FLT_FN (BUILT_IN_LCEIL):
2591 CASE_FLT_FN (BUILT_IN_LLCEIL):
2592 builtin_optab = lceil_optab;
2593 fallback_fn = BUILT_IN_CEIL;
2594 break;
2596 CASE_FLT_FN (BUILT_IN_IFLOOR):
2597 CASE_FLT_FN (BUILT_IN_LFLOOR):
2598 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2599 builtin_optab = lfloor_optab;
2600 fallback_fn = BUILT_IN_FLOOR;
2601 break;
2603 default:
2604 gcc_unreachable ();
2607 /* Make a suitable register to place result in. */
2608 mode = TYPE_MODE (TREE_TYPE (exp));
2610 target = gen_reg_rtx (mode);
2612 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2613 need to expand the argument again. This way, we will not perform
2614 side effects more than once. */
2615 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2617 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2619 start_sequence ();
2621 /* Compute into TARGET. */
2622 if (expand_sfix_optab (target, op0, builtin_optab))
2624 /* Output the entire sequence. */
2625 insns = get_insns ();
2626 end_sequence ();
2627 emit_insn (insns);
2628 return target;
2631 /* If we were unable to expand via the builtin, stop the sequence
2632 (without outputting the insns). */
2633 end_sequence ();
2635 /* Fall back to floating point rounding optab. */
2636 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2638 /* For non-C99 targets we may end up without a fallback fndecl here
2639 if the user called __builtin_lfloor directly. In this case emit
2640 a call to the floor/ceil variants nevertheless. This should give
2641 the best user experience on targets without full C99 support. */
2642 if (fallback_fndecl == NULL_TREE)
2644 tree fntype;
2645 const char *name = NULL;
2647 switch (DECL_FUNCTION_CODE (fndecl))
2649 case BUILT_IN_ICEIL:
2650 case BUILT_IN_LCEIL:
2651 case BUILT_IN_LLCEIL:
2652 name = "ceil";
2653 break;
2654 case BUILT_IN_ICEILF:
2655 case BUILT_IN_LCEILF:
2656 case BUILT_IN_LLCEILF:
2657 name = "ceilf";
2658 break;
2659 case BUILT_IN_ICEILL:
2660 case BUILT_IN_LCEILL:
2661 case BUILT_IN_LLCEILL:
2662 name = "ceill";
2663 break;
2664 case BUILT_IN_IFLOOR:
2665 case BUILT_IN_LFLOOR:
2666 case BUILT_IN_LLFLOOR:
2667 name = "floor";
2668 break;
2669 case BUILT_IN_IFLOORF:
2670 case BUILT_IN_LFLOORF:
2671 case BUILT_IN_LLFLOORF:
2672 name = "floorf";
2673 break;
2674 case BUILT_IN_IFLOORL:
2675 case BUILT_IN_LFLOORL:
2676 case BUILT_IN_LLFLOORL:
2677 name = "floorl";
2678 break;
2679 default:
2680 gcc_unreachable ();
2683 fntype = build_function_type_list (TREE_TYPE (arg),
2684 TREE_TYPE (arg), NULL_TREE);
2685 fallback_fndecl = build_fn_decl (name, fntype);
2688 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2690 tmp = expand_normal (exp);
2691 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2693 /* Truncate the result of floating point optab to integer
2694 via expand_fix (). */
2695 target = gen_reg_rtx (mode);
2696 expand_fix (target, tmp, 0);
2698 return target;
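/* E.g. when no lfloor pattern exists for the modes involved,
   "long l = __builtin_lfloorf (x);" is emitted as roughly
   "l = (long) floorf (x);": a call to floorf (built above if the
   target lacks C99 support) followed by expand_fix for the integer
   conversion (sketch). */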
2701 /* Expand a call to one of the builtin math functions doing integer
2702 conversion (lrint).
2703 Return 0 if a normal call should be emitted rather than expanding the
2704 function in-line. EXP is the expression that is a call to the builtin
2705 function; if convenient, the result should be placed in TARGET. */
2707 static rtx
2708 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2710 convert_optab builtin_optab;
2711 rtx op0;
2712 rtx_insn *insns;
2713 tree fndecl = get_callee_fndecl (exp);
2714 tree arg;
2715 machine_mode mode;
2716 enum built_in_function fallback_fn = BUILT_IN_NONE;
2718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2719 gcc_unreachable ();
2721 arg = CALL_EXPR_ARG (exp, 0);
2723 switch (DECL_FUNCTION_CODE (fndecl))
2725 CASE_FLT_FN (BUILT_IN_IRINT):
2726 fallback_fn = BUILT_IN_LRINT;
2727 gcc_fallthrough ();
2728 CASE_FLT_FN (BUILT_IN_LRINT):
2729 CASE_FLT_FN (BUILT_IN_LLRINT):
2730 builtin_optab = lrint_optab;
2731 break;
2733 CASE_FLT_FN (BUILT_IN_IROUND):
2734 fallback_fn = BUILT_IN_LROUND;
2735 gcc_fallthrough ();
2736 CASE_FLT_FN (BUILT_IN_LROUND):
2737 CASE_FLT_FN (BUILT_IN_LLROUND):
2738 builtin_optab = lround_optab;
2739 break;
2741 default:
2742 gcc_unreachable ();
2745 /* There's no easy way to detect the case we need to set EDOM. */
2746 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2747 return NULL_RTX;
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2752 /* There's no easy way to detect the case we need to set EDOM. */
2753 if (!flag_errno_math)
2755 rtx result = gen_reg_rtx (mode);
2757 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2758 need to expand the argument again. This way, we will not perform
2759 side effects more than once. */
2760 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2762 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2764 start_sequence ();
2766 if (expand_sfix_optab (result, op0, builtin_optab))
2768 /* Output the entire sequence. */
2769 insns = get_insns ();
2770 end_sequence ();
2771 emit_insn (insns);
2772 return result;
2775 /* If we were unable to expand via the builtin, stop the sequence
2776 (without outputting the insns) and call to the library function
2777 with the stabilized argument list. */
2778 end_sequence ();
2781 if (fallback_fn != BUILT_IN_NONE)
2783 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2784 targets, (int) round (x) should never be transformed into
2785 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2786 a call to lround in the hope that the target provides at least some
2787 C99 functions. This should give the best user experience on
2788 targets without full C99 support. */
2789 tree fallback_fndecl = mathfn_built_in_1
2790 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2792 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2793 fallback_fndecl, 1, arg);
2795 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2796 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2797 return convert_to_mode (mode, target, 0);
2800 return expand_call (exp, target, target == const0_rtx);
2803 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2804 a normal call should be emitted rather than expanding the function
2805 in-line. EXP is the expression that is a call to the builtin
2806 function; if convenient, the result should be placed in TARGET. */
2808 static rtx
2809 expand_builtin_powi (tree exp, rtx target)
2811 tree arg0, arg1;
2812 rtx op0, op1;
2813 machine_mode mode;
2814 machine_mode mode2;
2816 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2817 return NULL_RTX;
2819 arg0 = CALL_EXPR_ARG (exp, 0);
2820 arg1 = CALL_EXPR_ARG (exp, 1);
2821 mode = TYPE_MODE (TREE_TYPE (exp));
2823 /* Emit a libcall to libgcc. */
2825 /* Mode of the 2nd argument must match that of an int. */
2826 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2828 if (target == NULL_RTX)
2829 target = gen_reg_rtx (mode);
2831 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2832 if (GET_MODE (op0) != mode)
2833 op0 = convert_to_mode (mode, op0, 0);
2834 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2835 if (GET_MODE (op1) != mode2)
2836 op1 = convert_to_mode (mode2, op1, 0);
2838 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2839 target, LCT_CONST, mode,
2840 op0, mode, op1, mode2);
2842 return target;
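/* E.g. __builtin_powif (x, 5) becomes a libcall to the libgcc helper
   __powisf2 (x, 5); the helper name follows the mode (__powidf2 for
   double, and so on). */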
2845 /* Expand expression EXP which is a call to the strlen builtin. Return
2846 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2847 try to get the result in TARGET, if convenient. */
2849 static rtx
2850 expand_builtin_strlen (tree exp, rtx target,
2851 machine_mode target_mode)
2853 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2854 return NULL_RTX;
2856 struct expand_operand ops[4];
2857 rtx pat;
2858 tree len;
2859 tree src = CALL_EXPR_ARG (exp, 0);
2860 rtx src_reg;
2861 rtx_insn *before_strlen;
2862 machine_mode insn_mode;
2863 enum insn_code icode = CODE_FOR_nothing;
2864 unsigned int align;
2866 /* If the length can be computed at compile-time, return it. */
2867 len = c_strlen (src, 0);
2868 if (len)
2869 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2871 /* If the length can be computed at compile-time and is a constant
2872 integer, but there are side-effects in src, evaluate
2873 src for side-effects, then return len.
2874 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2875 can be optimized into: i++; x = 3; */
2876 len = c_strlen (src, 1);
2877 if (len && TREE_CODE (len) == INTEGER_CST)
2879 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2880 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2883 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2885 /* If SRC is not a pointer type, don't do this operation inline. */
2886 if (align == 0)
2887 return NULL_RTX;
2889 /* Bail out if we can't compute strlen in the right mode. */
2890 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2892 icode = optab_handler (strlen_optab, insn_mode);
2893 if (icode != CODE_FOR_nothing)
2894 break;
2896 if (insn_mode == VOIDmode)
2897 return NULL_RTX;
2899 /* Make a place to hold the source address. We will not expand
2900 the actual source until we are sure that the expansion will
2901 not fail -- there are trees that cannot be expanded twice. */
2902 src_reg = gen_reg_rtx (Pmode);
2904 /* Mark the beginning of the strlen sequence so we can emit the
2905 source operand later. */
2906 before_strlen = get_last_insn ();
2908 create_output_operand (&ops[0], target, insn_mode);
2909 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2910 create_integer_operand (&ops[2], 0);
2911 create_integer_operand (&ops[3], align);
2912 if (!maybe_expand_insn (icode, 4, ops))
2913 return NULL_RTX;
2915 /* Check to see if the argument was declared attribute nonstring
2916 and if so, issue a warning since at this point it's not known
2917 to be nul-terminated. */
2918 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2920 /* Now that we are assured of success, expand the source. */
2921 start_sequence ();
2922 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2923 if (pat != src_reg)
2925 #ifdef POINTERS_EXTEND_UNSIGNED
2926 if (GET_MODE (pat) != Pmode)
2927 pat = convert_to_mode (Pmode, pat,
2928 POINTERS_EXTEND_UNSIGNED);
2929 #endif
2930 emit_move_insn (src_reg, pat);
2932 pat = get_insns ();
2933 end_sequence ();
2935 if (before_strlen)
2936 emit_insn_after (pat, before_strlen);
2937 else
2938 emit_insn_before (pat, get_insns ());
2940 /* Return the value in the proper mode for this function. */
2941 if (GET_MODE (ops[0].value) == target_mode)
2942 target = ops[0].value;
2943 else if (target != 0)
2944 convert_move (target, ops[0].value, 0);
2945 else
2946 target = convert_to_mode (target_mode, ops[0].value, 0);
2948 return target;
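/* E.g. "n = strlen ("abc");" expands to "n = 3" via the c_strlen
   path above, with no code emitted for the call at all. */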
2951 /* Expand call EXP to the strnlen built-in, returning the result
2952 in TARGET if convenient, or NULL_RTX on failure. */
2954 static rtx
2955 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2957 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2958 return NULL_RTX;
2960 tree src = CALL_EXPR_ARG (exp, 0);
2961 tree bound = CALL_EXPR_ARG (exp, 1);
2963 if (!bound)
2964 return NULL_RTX;
2966 location_t loc = UNKNOWN_LOCATION;
2967 if (EXPR_HAS_LOCATION (exp))
2968 loc = EXPR_LOCATION (exp);
2970 tree maxobjsize = max_object_size ();
2971 tree func = get_callee_fndecl (exp);
2973 tree len = c_strlen (src, 0);
2975 if (TREE_CODE (bound) == INTEGER_CST)
2977 if (!TREE_NO_WARNING (exp)
2978 && tree_int_cst_lt (maxobjsize, bound)
2979 && warning_at (loc, OPT_Wstringop_overflow_,
2980 "%K%qD specified bound %E "
2981 "exceeds maximum object size %E",
2982 exp, func, bound, maxobjsize))
2983 TREE_NO_WARNING (exp) = true;
2985 if (!len || TREE_CODE (len) != INTEGER_CST)
2986 return NULL_RTX;
2988 len = fold_convert_loc (loc, size_type_node, len);
2989 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2990 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2993 if (TREE_CODE (bound) != SSA_NAME)
2994 return NULL_RTX;
2996 wide_int min, max;
2997 enum value_range_type rng = get_range_info (bound, &min, &max);
2998 if (rng != VR_RANGE)
2999 return NULL_RTX;
3001 if (!TREE_NO_WARNING (exp)
3002 && wi::ltu_p (wi::to_wide (maxobjsize), min)
3003 && warning_at (loc, OPT_Wstringop_overflow_,
3004 "%K%qD specified bound [%wu, %wu] "
3005 "exceeds maximum object size %E",
3006 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3007 TREE_NO_WARNING (exp) = true;
3009 if (!len || TREE_CODE (len) != INTEGER_CST)
3010 return NULL_RTX;
3012 if (wi::gtu_p (min, wi::to_wide (len)))
3013 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
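/* For example, if SRC is a literal of length 3 and BOUND is an SSA
   name with recorded range [8, 16], the wi::gtu_p test above applies
   and the call expands to the constant 3 (sketch). */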
3019 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3020 bytes from constant string DATA + OFFSET and return it as a target
3021 constant. */
3023 static rtx
3024 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3025 scalar_int_mode mode)
3027 const char *str = (const char *) data;
3029 gcc_assert (offset >= 0
3030 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3031 <= strlen (str) + 1));
3033 return c_readstr (str + offset, mode);
3036 /* LEN specifies the length of the block for the memcpy/memset operation.
3037 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3038 In some cases we can make a very likely guess at the maximum size,
3039 which we then store in PROBABLE_MAX_SIZE. */
3041 static void
3042 determine_block_size (tree len, rtx len_rtx,
3043 unsigned HOST_WIDE_INT *min_size,
3044 unsigned HOST_WIDE_INT *max_size,
3045 unsigned HOST_WIDE_INT *probable_max_size)
3047 if (CONST_INT_P (len_rtx))
3049 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3050 return;
3052 else
3054 wide_int min, max;
3055 enum value_range_type range_type = VR_UNDEFINED;
3057 /* Determine bounds from the type. */
3058 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3059 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3060 else
3061 *min_size = 0;
3062 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3063 *probable_max_size = *max_size
3064 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3065 else
3066 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3068 if (TREE_CODE (len) == SSA_NAME)
3069 range_type = get_range_info (len, &min, &max);
3070 if (range_type == VR_RANGE)
3072 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3073 *min_size = min.to_uhwi ();
3074 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3075 *probable_max_size = *max_size = max.to_uhwi ();
3077 else if (range_type == VR_ANTI_RANGE)
3079 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3080 if (min == 0)
3082 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3083 *min_size = max.to_uhwi () + 1;
3085 /* Code like
3087 int n;
3088 if (n < 100)
3089 memcpy (a, b, n)
3091 produces an anti-range allowing negative values of N. We can
3092 still use that information and guess that N is not negative. */
3094 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3095 *probable_max_size = min.to_uhwi () - 1;
3098 gcc_checking_assert (*max_size <=
3099 (unsigned HOST_WIDE_INT)
3100 GET_MODE_MASK (GET_MODE (len_rtx)));
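/* For instance, given

     int n;
     if (n != 0)
       memset (p, 0, n);

   N carries the anti-range ~[0, 0], so *MIN_SIZE becomes 1 even
   though the type bounds alone would only give 0 (sketch). */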
3103 /* Try to verify that the sizes and lengths of the arguments to a string
3104 manipulation function given by EXP are within valid bounds and that
3105 the operation does not lead to buffer overflow or read past the end.
3106 Arguments other than EXP may be null. When non-null, the arguments
3107 have the following meaning:
3108 DST is the destination of a copy call or NULL otherwise.
3109 SRC is the source of a copy call or NULL otherwise.
3110 DSTWRITE is the number of bytes written into the destination obtained
3111 from the user-supplied size argument to the function (such as in
3112 memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3113 MAXREAD is the user-supplied bound on the length of the source sequence
3114 (such as in strncat (d, s, N)). It specifies the upper limit on the number
3115 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3116 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3117 expression EXP is a string function call (as opposed to a memory call
3118 like memcpy). As an exception, SRCSTR can also be an integer denoting
3119 the precomputed size of the source string or object (for functions like
3120 memcpy).
3121 DSTSIZE is the size of the destination object specified by the last
3122 argument to the _chk builtins, typically resulting from the expansion
3123 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3124 DSTSIZE).
3126 When DSTWRITE is null, LEN is checked to verify that it doesn't exceed
3127 SIZE_MAX.
3129 If the call is successfully verified as safe return true, otherwise
3130 return false. */
3132 static bool
3133 check_access (tree exp, tree, tree, tree dstwrite,
3134 tree maxread, tree srcstr, tree dstsize)
3136 int opt = OPT_Wstringop_overflow_;
3138 /* The size of the largest object is half the address space, or
3139 PTRDIFF_MAX. (This is way too permissive.) */
3140 tree maxobjsize = max_object_size ();
3142 /* Either the length of the source string for string functions or
3143 the size of the source object for raw memory functions. */
3144 tree slen = NULL_TREE;
3146 tree range[2] = { NULL_TREE, NULL_TREE };
3148 /* Set to true when the exact number of bytes written by a string
3149 function like strcpy is not known and the only thing that is
3150 known is that it must be at least one (for the terminating nul). */
3151 bool at_least_one = false;
3152 if (srcstr)
3154 /* SRCSTR is normally a pointer to string but as a special case
3155 it can be an integer denoting the length of a string. */
3156 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3158 /* Try to determine the range of lengths the source string
3159 refers to. If it can be determined and is less than
3160 the upper bound given by MAXREAD add one to it for
3161 the terminating nul. Otherwise, set it to one for
3162 the same reason, or to MAXREAD as appropriate. */
3163 get_range_strlen (srcstr, range);
3164 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3166 if (maxread && tree_int_cst_le (maxread, range[0]))
3167 range[0] = range[1] = maxread;
3168 else
3169 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3170 range[0], size_one_node);
3172 if (maxread && tree_int_cst_le (maxread, range[1]))
3173 range[1] = maxread;
3174 else if (!integer_all_onesp (range[1]))
3175 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3176 range[1], size_one_node);
3178 slen = range[0];
3180 else
3182 at_least_one = true;
3183 slen = size_one_node;
3186 else
3187 slen = srcstr;
3190 if (!dstwrite && !maxread)
3192 /* When the only available piece of data is the object size
3193 there is nothing to do. */
3194 if (!slen)
3195 return true;
3197 /* Otherwise, when the length of the source sequence is known
3198 (as with strlen), set DSTWRITE to it. */
3199 if (!range[0])
3200 dstwrite = slen;
3203 if (!dstsize)
3204 dstsize = maxobjsize;
3206 if (dstwrite)
3207 get_size_range (dstwrite, range);
3209 tree func = get_callee_fndecl (exp);
3211 /* First check the number of bytes to be written against the maximum
3212 object size. */
3213 if (range[0]
3214 && TREE_CODE (range[0]) == INTEGER_CST
3215 && tree_int_cst_lt (maxobjsize, range[0]))
3217 if (TREE_NO_WARNING (exp))
3218 return false;
3220 location_t loc = tree_nonartificial_location (exp);
3221 loc = expansion_point_location_if_in_system_header (loc);
3223 bool warned;
3224 if (range[0] == range[1])
3225 warned = warning_at (loc, opt,
3226 "%K%qD specified size %E "
3227 "exceeds maximum object size %E",
3228 exp, func, range[0], maxobjsize);
3229 else
3230 warned = warning_at (loc, opt,
3231 "%K%qD specified size between %E and %E "
3232 "exceeds maximum object size %E",
3233 exp, func,
3234 range[0], range[1], maxobjsize);
3235 if (warned)
3236 TREE_NO_WARNING (exp) = true;
3238 return false;
3241 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3242 constant, and in range of unsigned HOST_WIDE_INT. */
3243 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3245 /* Next check the number of bytes to be written against the destination
3246 object size. */
3247 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3249 if (range[0]
3250 && TREE_CODE (range[0]) == INTEGER_CST
3251 && ((tree_fits_uhwi_p (dstsize)
3252 && tree_int_cst_lt (dstsize, range[0]))
3253 || (dstwrite
3254 && tree_fits_uhwi_p (dstwrite)
3255 && tree_int_cst_lt (dstwrite, range[0]))))
3257 if (TREE_NO_WARNING (exp))
3258 return false;
3260 location_t loc = tree_nonartificial_location (exp);
3261 loc = expansion_point_location_if_in_system_header (loc);
3263 if (dstwrite == slen && at_least_one)
3265 /* This is a call to strcpy with a destination of 0 size
3266 and a source of unknown length. The call will write
3267 at least one byte past the end of the destination. */
3268 warning_at (loc, opt,
3269 "%K%qD writing %E or more bytes into a region "
3270 "of size %E overflows the destination",
3271 exp, func, range[0], dstsize);
3273 else if (tree_int_cst_equal (range[0], range[1]))
3274 warning_n (loc, opt, tree_to_uhwi (range[0]),
3275 "%K%qD writing %E byte into a region "
3276 "of size %E overflows the destination",
3277 "%K%qD writing %E bytes into a region "
3278 "of size %E overflows the destination",
3279 exp, func, range[0], dstsize);
3280 else if (tree_int_cst_sign_bit (range[1]))
3282 /* Avoid printing the upper bound if it's invalid. */
3283 warning_at (loc, opt,
3284 "%K%qD writing %E or more bytes into a region "
3285 "of size %E overflows the destination",
3286 exp, func, range[0], dstsize);
3288 else
3289 warning_at (loc, opt,
3290 "%K%qD writing between %E and %E bytes into "
3291 "a region of size %E overflows the destination",
3292 exp, func, range[0], range[1],
3293 dstsize);
3295 /* Return error when an overflow has been detected. */
3296 return false;
3300 /* Check the maximum length of the source sequence against the size
3301 of the destination object if known, or against the maximum size
3302 of an object. */
3303 if (maxread)
3305 get_size_range (maxread, range);
3307 /* Use the lower end for MAXREAD from now on. */
3308 if (range[0])
3309 maxread = range[0];
3311 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3313 location_t loc = tree_nonartificial_location (exp);
3314 loc = expansion_point_location_if_in_system_header (loc);
3316 if (tree_int_cst_lt (maxobjsize, range[0]))
3318 if (TREE_NO_WARNING (exp))
3319 return false;
3321 /* Warn about absurdly large sizes first, since that's more
3322 likely to be meaningful than saying that the bound
3323 is greater than the object size when both are big. */
3324 if (range[0] == range[1])
3325 warning_at (loc, opt,
3326 "%K%qD specified bound %E "
3327 "exceeds maximum object size %E",
3328 exp, func,
3329 range[0], maxobjsize);
3330 else
3331 warning_at (loc, opt,
3332 "%K%qD specified bound between %E and %E "
3333 "exceeds maximum object size %E",
3334 exp, func,
3335 range[0], range[1], maxobjsize);
3337 return false;
3340 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3342 if (TREE_NO_WARNING (exp))
3343 return false;
3345 if (tree_int_cst_equal (range[0], range[1]))
3346 warning_at (loc, opt,
3347 "%K%qD specified bound %E "
3348 "exceeds destination size %E",
3349 exp, func,
3350 range[0], dstsize);
3351 else
3352 warning_at (loc, opt,
3353 "%K%qD specified bound between %E and %E "
3354 "exceeds destination size %E",
3355 exp, func,
3356 range[0], range[1], dstsize);
3357 return false;
3362 /* Check for reading past the end of SRC. */
3363 if (slen
3364 && slen == srcstr
3365 && dstwrite && range[0]
3366 && tree_int_cst_lt (slen, range[0]))
3368 if (TREE_NO_WARNING (exp))
3369 return false;
3371 location_t loc = tree_nonartificial_location (exp);
3373 if (tree_int_cst_equal (range[0], range[1]))
3374 warning_n (loc, opt, tree_to_uhwi (range[0]),
3375 "%K%qD reading %E byte from a region of size %E",
3376 "%K%qD reading %E bytes from a region of size %E",
3377 exp, func, range[0], slen);
3378 else if (tree_int_cst_sign_bit (range[1]))
3380 /* Avoid printing the upper bound if it's invalid. */
3381 warning_at (loc, opt,
3382 "%K%qD reading %E or more bytes from a region "
3383 "of size %E",
3384 exp, func, range[0], slen);
3386 else
3387 warning_at (loc, opt,
3388 "%K%qD reading between %E and %E bytes from a region "
3389 "of size %E",
3390 exp, func, range[0], range[1], slen);
3391 return false;
3394 return true;
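/* For example,

     char d[3];
     memcpy (d, s, 5);

   reaches the "writing 5 bytes into a region of size 3 overflows the
   destination" warning above, since RANGE[0] == RANGE[1] == 5 exceeds
   DSTSIZE == 3 (sketch; the exact wording depends on what is known
   about the operands). */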
3397 /* Helper to compute the size of the object referenced by the DEST
3398 expression which must have pointer type, using Object Size type
3399 OSTYPE (only the least significant 2 bits are used). Return
3400 an estimate of the size of the object if successful or NULL when
3401 the size cannot be determined. When the referenced object involves
3402 a non-constant offset in some range the returned value represents
3403 the largest size given the smallest non-negative offset in the
3404 range. The function is intended for diagnostics and should not
3405 be used to influence code generation or optimization. */
3407 tree
3408 compute_objsize (tree dest, int ostype)
3410 unsigned HOST_WIDE_INT size;
3412 /* Only the two least significant bits are meaningful. */
3413 ostype &= 3;
3415 if (compute_builtin_object_size (dest, ostype, &size))
3416 return build_int_cst (sizetype, size);
3418 if (TREE_CODE (dest) == SSA_NAME)
3420 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3421 if (!is_gimple_assign (stmt))
3422 return NULL_TREE;
3424 dest = gimple_assign_rhs1 (stmt);
3426 tree_code code = gimple_assign_rhs_code (stmt);
3427 if (code == POINTER_PLUS_EXPR)
3429 /* compute_builtin_object_size fails for addresses with
3430 non-constant offsets. Try to determine the range of
3431 such an offset here and use it to adjust the constant
3432 size. */
3433 tree off = gimple_assign_rhs2 (stmt);
3434 if (TREE_CODE (off) == INTEGER_CST)
3436 if (tree size = compute_objsize (dest, ostype))
3438 wide_int wioff = wi::to_wide (off);
3439 wide_int wisiz = wi::to_wide (size);
3441 /* Ignore negative offsets for now. For others,
3442 use the lower bound as the most optimistic
3443 estimate of the (remaining) size. */
3444 if (wi::sign_mask (wioff))
3446 else if (wi::ltu_p (wioff, wisiz))
3447 return wide_int_to_tree (TREE_TYPE (size),
3448 wi::sub (wisiz, wioff));
3449 else
3450 return size_zero_node;
3453 else if (TREE_CODE (off) == SSA_NAME
3454 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3456 wide_int min, max;
3457 enum value_range_type rng = get_range_info (off, &min, &max);
3459 if (rng == VR_RANGE)
3461 if (tree size = compute_objsize (dest, ostype))
3463 wide_int wisiz = wi::to_wide (size);
3465 /* Ignore negative offsets for now. For others,
3466 use the lower bound as the most optimistic
3467 estimate of the (remaining) size. */
3468 if (wi::sign_mask (min))
3470 else if (wi::ltu_p (min, wisiz))
3471 return wide_int_to_tree (TREE_TYPE (size),
3472 wi::sub (wisiz, min));
3473 else
3474 return size_zero_node;
3479 else if (code != ADDR_EXPR)
3480 return NULL_TREE;
3483 /* Unless computing the largest size (for memcpy and other raw memory
3484 functions), try to determine the size of the object from its type. */
3485 if (!ostype)
3486 return NULL_TREE;
3488 if (TREE_CODE (dest) != ADDR_EXPR)
3489 return NULL_TREE;
3491 tree type = TREE_TYPE (dest);
3492 if (TREE_CODE (type) == POINTER_TYPE)
3493 type = TREE_TYPE (type);
3495 type = TYPE_MAIN_VARIANT (type);
3497 if (TREE_CODE (type) == ARRAY_TYPE
3498 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3500 /* Return the constant size unless it's zero (that's a zero-length
3501 array likely at the end of a struct). */
3502 tree size = TYPE_SIZE_UNIT (type);
3503 if (size && TREE_CODE (size) == INTEGER_CST
3504 && !integer_zerop (size))
3505 return size;
3508 return NULL_TREE;
3511 /* Helper to determine and check the sizes of the source and the destination
3512 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3513 call expression, DEST is the destination argument, SRC is the source
3514 argument or null, and LEN is the number of bytes. Use Object Size type-0
3515 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3516 (no overflow or invalid sizes), false otherwise. */
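/* For illustration, a hypothetical call this helper diagnoses:

     char buf[4];
     memcpy (buf, src, 8);

   The type-0 size of BUF (4) is smaller than the constant byte
   count (8), so check_access reports the overflow and the helper
   returns false. */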
3518 static bool
3519 check_memop_access (tree exp, tree dest, tree src, tree size)
3521 /* For functions like memset and memcpy that operate on raw memory
3522 try to determine the size of the largest source and destination
3523 object using type-0 Object Size regardless of the object size
3524 type specified by the option. */
3525 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3526 tree dstsize = compute_objsize (dest, 0);
3528 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3529 srcsize, dstsize);
3532 /* Validate memchr arguments without performing any expansion.
3533 Return NULL_RTX. */
3535 static rtx
3536 expand_builtin_memchr (tree exp, rtx)
3538 if (!validate_arglist (exp,
3539 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3540 return NULL_RTX;
3542 tree arg1 = CALL_EXPR_ARG (exp, 0);
3543 tree len = CALL_EXPR_ARG (exp, 2);
3545 /* Diagnose calls where the specified length exceeds the size
3546 of the object. */
3547 if (warn_stringop_overflow)
3549 tree size = compute_objsize (arg1, 0);
3550 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3551 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3554 return NULL_RTX;
3557 /* Expand a call EXP to the memcpy builtin.
3558 Return NULL_RTX if we failed; the caller should emit a normal call,
3559 otherwise try to get the result in TARGET, if convenient (and in
3560 mode MODE if that's convenient). */
3562 static rtx
3563 expand_builtin_memcpy (tree exp, rtx target)
3565 if (!validate_arglist (exp,
3566 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3567 return NULL_RTX;
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 1);
3571 tree len = CALL_EXPR_ARG (exp, 2);
3573 check_memop_access (exp, dest, src, len);
3575 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3576 /*endp=*/ 0);
3579 /* Check a call EXP to the memmove built-in for validity.
3580 Return NULL_RTX on both success and failure. */
3582 static rtx
3583 expand_builtin_memmove (tree exp, rtx)
3585 if (!validate_arglist (exp,
3586 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3587 return NULL_RTX;
3589 tree dest = CALL_EXPR_ARG (exp, 0);
3590 tree src = CALL_EXPR_ARG (exp, 1);
3591 tree len = CALL_EXPR_ARG (exp, 2);
3593 check_memop_access (exp, dest, src, len);
3595 return NULL_RTX;
3598 /* Expand a call EXP to the mempcpy builtin.
3599 Return NULL_RTX if we failed; the caller should emit a normal call,
3600 otherwise try to get the result in TARGET, if convenient (and in
3601 mode MODE if that's convenient). If ENDP is 0 return the
3602 destination pointer, if ENDP is 1 return the end pointer ala
3603 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3604 stpcpy. */
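/* To illustrate the ENDP encoding: after copying N bytes to DEST,
     endp == 0 returns DEST (memcpy),
     endp == 1 returns DEST + N (mempcpy),
     endp == 2 returns DEST + N - 1 (stpcpy, pointing at the
     terminating nul). */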
3606 static rtx
3607 expand_builtin_mempcpy (tree exp, rtx target)
3609 if (!validate_arglist (exp,
3610 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3611 return NULL_RTX;
3613 tree dest = CALL_EXPR_ARG (exp, 0);
3614 tree src = CALL_EXPR_ARG (exp, 1);
3615 tree len = CALL_EXPR_ARG (exp, 2);
3617 /* Policy does not generally allow using compute_objsize (which
3618 is used internally by check_memop_access) to change code generation
3619 or drive optimization decisions.
3621 In this instance it is safe because the code we generate has
3622 the same semantics regardless of the return value of
3623 check_memop_access. Exactly the same amount of data is copied
3624 and the return value is exactly the same in both cases.
3626 Furthermore, check_memop_access always uses mode 0 for the call to
3627 compute_objsize, so the imprecise nature of compute_objsize is
3628 avoided. */
3630 /* Avoid expanding mempcpy into memcpy when the call is determined
3631 to overflow the buffer. This also prevents the same overflow
3632 from being diagnosed again when expanding memcpy. */
3633 if (!check_memop_access (exp, dest, src, len))
3634 return NULL_RTX;
3636 return expand_builtin_mempcpy_args (dest, src, len,
3637 target, exp, /*endp=*/ 1);
3640 /* Helper function to do the actual work of expanding the memory copy family
3641 of functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
3642 of memory from SRC to DEST and assign to TARGET if convenient.
3643 If ENDP is 0 return the
3644 destination pointer, if ENDP is 1 return the end pointer ala
3645 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3646 stpcpy. */
3648 static rtx
3649 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3650 rtx target, tree exp, int endp)
3652 const char *src_str;
3653 unsigned int src_align = get_pointer_alignment (src);
3654 unsigned int dest_align = get_pointer_alignment (dest);
3655 rtx dest_mem, src_mem, dest_addr, len_rtx;
3656 HOST_WIDE_INT expected_size = -1;
3657 unsigned int expected_align = 0;
3658 unsigned HOST_WIDE_INT min_size;
3659 unsigned HOST_WIDE_INT max_size;
3660 unsigned HOST_WIDE_INT probable_max_size;
3662 /* If DEST is not a pointer type, call the normal function. */
3663 if (dest_align == 0)
3664 return NULL_RTX;
3666 /* If SRC is not a pointer type, don't do this
3667 operation in-line. */
3668 if (src_align == 0)
3669 return NULL_RTX;
3671 if (currently_expanding_gimple_stmt)
3672 stringop_block_profile (currently_expanding_gimple_stmt,
3673 &expected_align, &expected_size);
3675 if (expected_align < dest_align)
3676 expected_align = dest_align;
3677 dest_mem = get_memory_rtx (dest, len);
3678 set_mem_align (dest_mem, dest_align);
3679 len_rtx = expand_normal (len);
3680 determine_block_size (len, len_rtx, &min_size, &max_size,
3681 &probable_max_size);
3682 src_str = c_getstr (src);
3684 /* If SRC is a string constant and block move would be done
3685 by pieces, we can avoid loading the string from memory
3686 and only store the computed constants. */
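/* E.g. for memcpy (d, "hi", 3) with a constant length and suitable
   alignment, the bytes 'h', 'i', '\0' can be stored as immediates
   rather than loaded from the string constant (illustration). */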
3687 if (src_str
3688 && CONST_INT_P (len_rtx)
3689 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3690 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3691 CONST_CAST (char *, src_str),
3692 dest_align, false))
3694 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3695 builtin_memcpy_read_str,
3696 CONST_CAST (char *, src_str),
3697 dest_align, false, endp);
3698 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3699 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3700 return dest_mem;
3703 src_mem = get_memory_rtx (src, len);
3704 set_mem_align (src_mem, src_align);
3706 /* Copy word part most expediently. */
3707 enum block_op_methods method = BLOCK_OP_NORMAL;
3708 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3709 method = BLOCK_OP_TAILCALL;
3710 if (endp == 1 && target != const0_rtx)
3711 method = BLOCK_OP_NO_LIBCALL_RET;
3712 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3713 expected_align, expected_size,
3714 min_size, max_size, probable_max_size);
3715 if (dest_addr == pc_rtx)
3716 return NULL_RTX;
3718 if (dest_addr == 0)
3720 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3721 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3724 if (endp && target != const0_rtx)
3726 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3727 /* stpcpy returns a pointer to the last byte. */
3728 if (endp == 2)
3729 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3732 return dest_addr;
3735 static rtx
3736 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3737 rtx target, tree orig_exp, int endp)
3739 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3740 endp);
3743 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3744 we failed; the caller should emit a normal call. Otherwise try to
3745 get the result in TARGET, if convenient. If ENDP is 0 return the
3746 destination pointer, if ENDP is 1 return the end pointer ala
3747 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3748 stpcpy. */
3750 static rtx
3751 expand_movstr (tree dest, tree src, rtx target, int endp)
3753 struct expand_operand ops[3];
3754 rtx dest_mem;
3755 rtx src_mem;
3757 if (!targetm.have_movstr ())
3758 return NULL_RTX;
3760 dest_mem = get_memory_rtx (dest, NULL);
3761 src_mem = get_memory_rtx (src, NULL);
3762 if (!endp)
3764 target = force_reg (Pmode, XEXP (dest_mem, 0));
3765 dest_mem = replace_equiv_address (dest_mem, target);
3768 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3769 create_fixed_operand (&ops[1], dest_mem);
3770 create_fixed_operand (&ops[2], src_mem);
3771 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3772 return NULL_RTX;
3774 if (endp && target != const0_rtx)
3776 target = ops[0].value;
3777 /* movstr is supposed to set end to the address of the NUL
3778 terminator. If the caller requested a mempcpy-like return value,
3779 adjust it. */
3780 if (endp == 1)
3782 rtx tem = plus_constant (GET_MODE (target),
3783 gen_lowpart (GET_MODE (target), target), 1);
3784 emit_move_insn (target, force_operand (tem, NULL_RTX));
3787 return target;
3790 /* Do some very basic size validation of a call to the strcat builtin
3791 given by EXP. Return NULL_RTX to have the built-in expand to a call
3792 to the library function. */
3794 static rtx
3795 expand_builtin_strcat (tree exp, rtx)
3797 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3798 || !warn_stringop_overflow)
3799 return NULL_RTX;
3801 tree dest = CALL_EXPR_ARG (exp, 0);
3802 tree src = CALL_EXPR_ARG (exp, 1);
3804 /* There is no way here to determine the length of the string in
3805 the destination to which the SRC string is being appended, so
3806 just diagnose cases when the source string is longer than
3807 the destination object. */
3809 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3811 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3812 destsize);
3814 return NULL_RTX;
3817 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3818 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3819 try to get the result in TARGET, if convenient (and in mode MODE if that's
3820 convenient). */
3822 static rtx
3823 expand_builtin_strcpy (tree exp, rtx target)
3825 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3826 return NULL_RTX;
3828 tree dest = CALL_EXPR_ARG (exp, 0);
3829 tree src = CALL_EXPR_ARG (exp, 1);
3831 if (warn_stringop_overflow)
3833 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3834 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3835 src, destsize);
3838 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3840 /* Check to see if the argument was declared attribute nonstring
3841 and if so, issue a warning since at this point it's not known
3842 to be nul-terminated. */
3843 tree fndecl = get_callee_fndecl (exp);
3844 maybe_warn_nonstring_arg (fndecl, exp);
3845 return ret;
3848 return NULL_RTX;
3851 /* Helper function to do the actual work for expand_builtin_strcpy. The
3852 arguments to the builtin_strcpy call DEST and SRC are broken out
3853 so that this can also be called without constructing an actual CALL_EXPR.
3854 The other arguments and return value are the same as for
3855 expand_builtin_strcpy. */
3857 static rtx
3858 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3860 return expand_movstr (dest, src, target, /*endp=*/0);
3863 /* Expand a call EXP to the stpcpy builtin.
3864 Return NULL_RTX if we failed; the caller should emit a normal call,
3865 otherwise try to get the result in TARGET, if convenient (and in
3866 mode MODE if that's convenient). */
3868 static rtx
3869 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3871 tree dst, src;
3872 location_t loc = EXPR_LOCATION (exp);
3874 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3875 return NULL_RTX;
3877 dst = CALL_EXPR_ARG (exp, 0);
3878 src = CALL_EXPR_ARG (exp, 1);
3880 if (warn_stringop_overflow)
3882 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3883 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3884 src, destsize);
3887 /* If return value is ignored, transform stpcpy into strcpy. */
3888 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3890 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3891 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3892 return expand_expr (result, target, mode, EXPAND_NORMAL);
3894 else
3896 tree len, lenp1;
3897 rtx ret;
3899 /* Ensure we get an actual string whose length can be evaluated at
3900 compile-time, not an expression containing a string. This is
3901 because the latter will potentially produce pessimized code
3902 when used to produce the return value. */
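/* A sketch of the constant-source case (hypothetical example):
   for stpcpy (d, "ab"), LEN is 2 and LENP1 is 3, so the call
   becomes a mempcpy of 3 bytes with endp == 2, returning d + 2,
   a pointer to the terminating nul. */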
3903 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3904 return expand_movstr (dst, src, target, /*endp=*/2);
3906 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3907 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3908 target, exp, /*endp=*/2);
3910 if (ret)
3911 return ret;
3913 if (TREE_CODE (len) == INTEGER_CST)
3915 rtx len_rtx = expand_normal (len);
3917 if (CONST_INT_P (len_rtx))
3919 ret = expand_builtin_strcpy_args (dst, src, target);
3921 if (ret)
3923 if (! target)
3925 if (mode != VOIDmode)
3926 target = gen_reg_rtx (mode);
3927 else
3928 target = gen_reg_rtx (GET_MODE (ret));
3930 if (GET_MODE (target) != GET_MODE (ret))
3931 ret = gen_lowpart (GET_MODE (target), ret);
3933 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3934 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3935 gcc_assert (ret);
3937 return target;
3942 return expand_movstr (dst, src, target, /*endp=*/2);
3946 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3947 arguments while being careful to avoid duplicate warnings (which could
3948 be issued if the expander were to expand the call, resulting in it
3949 being emitted in expand_call ()). */
3951 static rtx
3952 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3954 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3956 /* The call has been successfully expanded. Check for nonstring
3957 arguments and issue warnings as appropriate. */
3958 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3959 return ret;
3962 return NULL_RTX;
3965 /* Check a call EXP to the stpncpy built-in for validity.
3966 Return NULL_RTX on both success and failure. */
3968 static rtx
3969 expand_builtin_stpncpy (tree exp, rtx)
3971 if (!validate_arglist (exp,
3972 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3973 || !warn_stringop_overflow)
3974 return NULL_RTX;
3976 /* The source and destination of the call. */
3977 tree dest = CALL_EXPR_ARG (exp, 0);
3978 tree src = CALL_EXPR_ARG (exp, 1);
3980 /* The exact number of bytes to write (not the maximum). */
3981 tree len = CALL_EXPR_ARG (exp, 2);
3983 /* The size of the destination object. */
3984 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3986 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3988 return NULL_RTX;
3991 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3992 bytes from constant string DATA + OFFSET and return it as target
3993 constant. */
3995 static rtx
3996 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3997 scalar_int_mode mode)
3999 const char *str = (const char *) data;
4001 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4002 return const0_rtx;
4004 return c_readstr (str + offset, mode);
4007 /* Helper to check the sizes of sequences and the destination of calls
4008 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4009 success (no overflow or invalid sizes), false otherwise. */
4011 static bool
4012 check_strncat_sizes (tree exp, tree objsize)
4014 tree dest = CALL_EXPR_ARG (exp, 0);
4015 tree src = CALL_EXPR_ARG (exp, 1);
4016 tree maxread = CALL_EXPR_ARG (exp, 2);
4018 /* Try to determine the range of lengths that the source expression
4019 refers to. */
4020 tree lenrange[2];
4021 get_range_strlen (src, lenrange);
4023 /* Try to verify that the destination is big enough for the shortest
4024 string. */
4026 if (!objsize && warn_stringop_overflow)
4028 /* If it hasn't been provided by __strncat_chk, try to determine
4029 the size of the destination object into which the source is
4030 being copied. */
4031 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4034 /* Add one for the terminating nul. */
4035 tree srclen = (lenrange[0]
4036 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4037 size_one_node)
4038 : NULL_TREE);
4040 /* The strncat function copies at most MAXREAD bytes and always appends
4041 the terminating nul so the specified upper bound should never be equal
4042 to (or greater than) the size of the destination. */
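/* A hypothetical call this rule diagnoses:

     char d[8];
     strncat (d, s, sizeof d);

   strncat appends a nul after the bytes it copies, so a bound
   equal to the destination size can write one byte too many. */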
4043 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4044 && tree_int_cst_equal (objsize, maxread))
4046 location_t loc = tree_nonartificial_location (exp);
4047 loc = expansion_point_location_if_in_system_header (loc);
4049 warning_at (loc, OPT_Wstringop_overflow_,
4050 "%K%qD specified bound %E equals destination size",
4051 exp, get_callee_fndecl (exp), maxread);
4053 return false;
4056 if (!srclen
4057 || (maxread && tree_fits_uhwi_p (maxread)
4058 && tree_fits_uhwi_p (srclen)
4059 && tree_int_cst_lt (maxread, srclen)))
4060 srclen = maxread;
4062 /* The number of bytes to write is LEN but check_access will also
4063 check SRCLEN if LEN's value isn't known. */
4064 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4065 objsize);
4068 /* Similar to expand_builtin_strcat, do some very basic size validation
4069 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4070 the built-in expand to a call to the library function. */
4072 static rtx
4073 expand_builtin_strncat (tree exp, rtx)
4075 if (!validate_arglist (exp,
4076 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4077 || !warn_stringop_overflow)
4078 return NULL_RTX;
4080 tree dest = CALL_EXPR_ARG (exp, 0);
4081 tree src = CALL_EXPR_ARG (exp, 1);
4082 /* The upper bound on the number of bytes to write. */
4083 tree maxread = CALL_EXPR_ARG (exp, 2);
4084 /* The length of the source sequence. */
4085 tree slen = c_strlen (src, 1);
4087 /* Try to determine the range of lengths that the source expression
4088 refers to. */
4089 tree lenrange[2];
4090 if (slen)
4091 lenrange[0] = lenrange[1] = slen;
4092 else
4093 get_range_strlen (src, lenrange);
4095 /* Try to verify that the destination is big enough for the shortest
4096 string. First try to determine the size of the destination object
4097 into which the source is being copied. */
4098 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4100 /* Add one for the terminating nul. */
4101 tree srclen = (lenrange[0]
4102 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4103 size_one_node)
4104 : NULL_TREE);
4106 /* The strncat function copies at most MAXREAD bytes and always appends
4107 the terminating nul so the specified upper bound should never be equal
4108 to (or greater than) the size of the destination. */
4109 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4110 && tree_int_cst_equal (destsize, maxread))
4112 location_t loc = tree_nonartificial_location (exp);
4113 loc = expansion_point_location_if_in_system_header (loc);
4115 warning_at (loc, OPT_Wstringop_overflow_,
4116 "%K%qD specified bound %E equals destination size",
4117 exp, get_callee_fndecl (exp), maxread);
4119 return NULL_RTX;
4122 if (!srclen
4123 || (maxread && tree_fits_uhwi_p (maxread)
4124 && tree_fits_uhwi_p (srclen)
4125 && tree_int_cst_lt (maxread, srclen)))
4126 srclen = maxread;
4128 /* The number of bytes to write is SRCLEN. */
4129 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4131 return NULL_RTX;
4134 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4135 NULL_RTX if we failed; the caller should emit a normal call. */
4137 static rtx
4138 expand_builtin_strncpy (tree exp, rtx target)
4140 location_t loc = EXPR_LOCATION (exp);
4142 if (validate_arglist (exp,
4143 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4145 tree dest = CALL_EXPR_ARG (exp, 0);
4146 tree src = CALL_EXPR_ARG (exp, 1);
4147 /* The number of bytes to write (not the maximum). */
4148 tree len = CALL_EXPR_ARG (exp, 2);
4149 /* The length of the source sequence. */
4150 tree slen = c_strlen (src, 1);
4152 if (warn_stringop_overflow)
4154 tree destsize = compute_objsize (dest,
4155 warn_stringop_overflow - 1);
4157 /* The number of bytes to write is LEN but check_access will also
4158 check SLEN if LEN's value isn't known. */
4159 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4160 destsize);
4163 /* We must be passed constant LEN and SRC parameters. */
4164 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4165 return NULL_RTX;
4167 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4169 /* We're required to pad with trailing zeros if the requested
4170 len is greater than strlen(s2)+1. In that case try to
4171 use store_by_pieces; if that fails, punt. */
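/* E.g. strncpy (d, "ab", 5) must store 'a', 'b' and then three
   trailing nul bytes; with a constant length this can be done by
   store_by_pieces (illustration). */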
4172 if (tree_int_cst_lt (slen, len))
4174 unsigned int dest_align = get_pointer_alignment (dest);
4175 const char *p = c_getstr (src);
4176 rtx dest_mem;
4178 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4179 || !can_store_by_pieces (tree_to_uhwi (len),
4180 builtin_strncpy_read_str,
4181 CONST_CAST (char *, p),
4182 dest_align, false))
4183 return NULL_RTX;
4185 dest_mem = get_memory_rtx (dest, len);
4186 store_by_pieces (dest_mem, tree_to_uhwi (len),
4187 builtin_strncpy_read_str,
4188 CONST_CAST (char *, p), dest_align, false, 0);
4189 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4190 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4191 return dest_mem;
4194 return NULL_RTX;
4197 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4198 bytes from constant string DATA + OFFSET and return it as target
4199 constant. */
4201 static rtx
4202 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4203 scalar_int_mode mode)
4205 const char *c = (const char *) data;
4206 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4208 memset (p, *c, GET_MODE_SIZE (mode));
4210 return c_readstr (p, mode);
4213 /* Callback routine for store_by_pieces. Return the RTL of a register
4214 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4215 char value given in the RTL register data. For example, if mode is
4216 4 bytes wide, return the RTL for 0x01010101*data. */
4218 static rtx
4219 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4220 scalar_int_mode mode)
4222 rtx target, coeff;
4223 size_t size;
4224 char *p;
4226 size = GET_MODE_SIZE (mode);
4227 if (size == 1)
4228 return (rtx) data;
4230 p = XALLOCAVEC (char, size);
4231 memset (p, 1, size);
4232 coeff = c_readstr (p, mode);
4234 target = convert_to_mode (mode, (rtx) data, 1);
4235 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4236 return force_reg (mode, target);
4239 /* Expand expression EXP, which is a call to the memset builtin. Return
4240 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4241 try to get the result in TARGET, if convenient (and in mode MODE if that's
4242 convenient). */
4244 static rtx
4245 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4247 if (!validate_arglist (exp,
4248 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4249 return NULL_RTX;
4251 tree dest = CALL_EXPR_ARG (exp, 0);
4252 tree val = CALL_EXPR_ARG (exp, 1);
4253 tree len = CALL_EXPR_ARG (exp, 2);
4255 check_memop_access (exp, dest, NULL_TREE, len);
4257 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4260 /* Helper function to do the actual work for expand_builtin_memset. The
4261 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4262 so that this can also be called without constructing an actual CALL_EXPR.
4263 The other arguments and return value are the same as for
4264 expand_builtin_memset. */
4266 static rtx
4267 expand_builtin_memset_args (tree dest, tree val, tree len,
4268 rtx target, machine_mode mode, tree orig_exp)
4270 tree fndecl, fn;
4271 enum built_in_function fcode;
4272 machine_mode val_mode;
4273 char c;
4274 unsigned int dest_align;
4275 rtx dest_mem, dest_addr, len_rtx;
4276 HOST_WIDE_INT expected_size = -1;
4277 unsigned int expected_align = 0;
4278 unsigned HOST_WIDE_INT min_size;
4279 unsigned HOST_WIDE_INT max_size;
4280 unsigned HOST_WIDE_INT probable_max_size;
4282 dest_align = get_pointer_alignment (dest);
4284 /* If DEST is not a pointer type, don't do this operation in-line. */
4285 if (dest_align == 0)
4286 return NULL_RTX;
4288 if (currently_expanding_gimple_stmt)
4289 stringop_block_profile (currently_expanding_gimple_stmt,
4290 &expected_align, &expected_size);
4292 if (expected_align < dest_align)
4293 expected_align = dest_align;
4295 /* If the LEN parameter is zero, return DEST. */
4296 if (integer_zerop (len))
4298 /* Evaluate and ignore VAL in case it has side-effects. */
4299 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4300 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4303 /* Stabilize the arguments in case we fail. */
4304 dest = builtin_save_expr (dest);
4305 val = builtin_save_expr (val);
4306 len = builtin_save_expr (len);
4308 len_rtx = expand_normal (len);
4309 determine_block_size (len, len_rtx, &min_size, &max_size,
4310 &probable_max_size);
4311 dest_mem = get_memory_rtx (dest, len);
4312 val_mode = TYPE_MODE (unsigned_char_type_node);
4314 if (TREE_CODE (val) != INTEGER_CST)
4316 rtx val_rtx;
4318 val_rtx = expand_normal (val);
4319 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4321 /* Assume that we can memset by pieces if we can store
4322 the coefficients by pieces (in the required modes).
4323 We can't pass builtin_memset_gen_str as that emits RTL. */
4324 c = 1;
4325 if (tree_fits_uhwi_p (len)
4326 && can_store_by_pieces (tree_to_uhwi (len),
4327 builtin_memset_read_str, &c, dest_align,
4328 true))
4330 val_rtx = force_reg (val_mode, val_rtx);
4331 store_by_pieces (dest_mem, tree_to_uhwi (len),
4332 builtin_memset_gen_str, val_rtx, dest_align,
4333 true, 0);
4335 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4336 dest_align, expected_align,
4337 expected_size, min_size, max_size,
4338 probable_max_size))
4339 goto do_libcall;
4341 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4342 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4343 return dest_mem;
4346 if (target_char_cast (val, &c))
4347 goto do_libcall;
4349 if (c)
4351 if (tree_fits_uhwi_p (len)
4352 && can_store_by_pieces (tree_to_uhwi (len),
4353 builtin_memset_read_str, &c, dest_align,
4354 true))
4355 store_by_pieces (dest_mem, tree_to_uhwi (len),
4356 builtin_memset_read_str, &c, dest_align, true, 0);
4357 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4358 gen_int_mode (c, val_mode),
4359 dest_align, expected_align,
4360 expected_size, min_size, max_size,
4361 probable_max_size))
4362 goto do_libcall;
4364 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4365 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4366 return dest_mem;
4369 set_mem_align (dest_mem, dest_align);
4370 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4371 CALL_EXPR_TAILCALL (orig_exp)
4372 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4373 expected_align, expected_size,
4374 min_size, max_size,
4375 probable_max_size);
4377 if (dest_addr == 0)
4379 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4380 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4383 return dest_addr;
4385 do_libcall:
4386 fndecl = get_callee_fndecl (orig_exp);
4387 fcode = DECL_FUNCTION_CODE (fndecl);
4388 if (fcode == BUILT_IN_MEMSET)
4389 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4390 dest, val, len);
4391 else if (fcode == BUILT_IN_BZERO)
4392 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4393 dest, len);
4394 else
4395 gcc_unreachable ();
4396 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4397 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4398 return expand_call (fn, target, target == const0_rtx);
4401 /* Expand expression EXP, which is a call to the bzero builtin. Return
4402 NULL_RTX if we failed; the caller should emit a normal call. */
4404 static rtx
4405 expand_builtin_bzero (tree exp)
4407 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4408 return NULL_RTX;
4410 tree dest = CALL_EXPR_ARG (exp, 0);
4411 tree size = CALL_EXPR_ARG (exp, 1);
4413 check_memop_access (exp, dest, NULL_TREE, size);
4415 /* New argument list transforming bzero(ptr x, int y) to
4416 memset(ptr x, int 0, size_t y). This is done this way
4417 so that if it isn't expanded inline, we fall back to
4418 calling bzero instead of memset. */
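/* Sketch: bzero (p, n) is expanded as memset (p, 0, (size_t) n),
   matching the argument conversion below. */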
4420 location_t loc = EXPR_LOCATION (exp);
4422 return expand_builtin_memset_args (dest, integer_zero_node,
4423 fold_convert_loc (loc,
4424 size_type_node, size),
4425 const0_rtx, VOIDmode, exp);
4428 /* Try to expand cmpstr operation ICODE with the given operands.
4429 Return the result rtx on success, otherwise return null. */
4431 static rtx
4432 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4433 HOST_WIDE_INT align)
4435 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4437 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4438 target = NULL_RTX;
4440 struct expand_operand ops[4];
4441 create_output_operand (&ops[0], target, insn_mode);
4442 create_fixed_operand (&ops[1], arg1_rtx);
4443 create_fixed_operand (&ops[2], arg2_rtx);
4444 create_integer_operand (&ops[3], align);
4445 if (maybe_expand_insn (icode, 4, ops))
4446 return ops[0].value;
4447 return NULL_RTX;
4450 /* Expand expression EXP, which is a call to the memcmp built-in function.
4451 Return NULL_RTX if we failed and the caller should emit a normal call,
4452 otherwise try to get the result in TARGET, if convenient.
4453 RESULT_EQ is true if we can relax the returned value to be either zero
4454 or nonzero, without caring about the sign. */
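/* For instance, in `if (memcmp (a, b, n) == 0)' only the zero or
   nonzero outcome matters, so RESULT_EQ is true and a cheaper
   equality-only block comparison may be emitted (illustration). */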
4456 static rtx
4457 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4459 if (!validate_arglist (exp,
4460 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4461 return NULL_RTX;
4463 tree arg1 = CALL_EXPR_ARG (exp, 0);
4464 tree arg2 = CALL_EXPR_ARG (exp, 1);
4465 tree len = CALL_EXPR_ARG (exp, 2);
4466 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4467 bool no_overflow = true;
4469 /* Diagnose calls where the specified length exceeds the size of either
4470 object. */
4471 tree size = compute_objsize (arg1, 0);
4472 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4473 len, /*maxread=*/NULL_TREE, size,
4474 /*objsize=*/NULL_TREE);
4475 if (no_overflow)
4477 size = compute_objsize (arg2, 0);
4478 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4479 len, /*maxread=*/NULL_TREE, size,
4480 /*objsize=*/NULL_TREE);
4483 /* Due to the performance benefit, always inline the calls first
4484 when result_eq is false. */
4485 rtx result = NULL_RTX;
4487 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4489 result = inline_expand_builtin_string_cmp (exp, target);
4490 if (result)
4491 return result;
4494 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4495 location_t loc = EXPR_LOCATION (exp);
4497 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4498 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4500 /* If we don't have POINTER_TYPE, call the function. */
4501 if (arg1_align == 0 || arg2_align == 0)
4502 return NULL_RTX;
4504 rtx arg1_rtx = get_memory_rtx (arg1, len);
4505 rtx arg2_rtx = get_memory_rtx (arg2, len);
4506 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4508 /* Set MEM_SIZE as appropriate. */
4509 if (CONST_INT_P (len_rtx))
4511 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4512 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4515 by_pieces_constfn constfn = NULL;
4517 const char *src_str = c_getstr (arg2);
4518 if (result_eq && src_str == NULL)
4520 src_str = c_getstr (arg1);
4521 if (src_str != NULL)
4522 std::swap (arg1_rtx, arg2_rtx);
4525 /* If SRC is a string constant and the block comparison would be done
4526 by pieces, we can avoid loading the string from memory
4527 and instead use the computed constants. */
4528 if (src_str
4529 && CONST_INT_P (len_rtx)
4530 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4531 constfn = builtin_memcpy_read_str;
4533 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4534 TREE_TYPE (len), target,
4535 result_eq, constfn,
4536 CONST_CAST (char *, src_str));
4538 if (result)
4540 /* Return the value in the proper mode for this function. */
4541 if (GET_MODE (result) == mode)
4542 return result;
4544 if (target != 0)
4546 convert_move (target, result, 0);
4547 return target;
4550 return convert_to_mode (mode, result, 0);
4553 return NULL_RTX;
4556 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4557 if we failed; the caller should emit a normal call, otherwise try to get
4558 the result in TARGET, if convenient. */
4560 static rtx
4561 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4563 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4564 return NULL_RTX;
4566 /* Due to the performance benefit, always inline the calls first. */
4567 rtx result = NULL_RTX;
4568 result = inline_expand_builtin_string_cmp (exp, target);
4569 if (result)
4570 return result;
4572 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4573 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4574 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4575 return NULL_RTX;
4577 tree arg1 = CALL_EXPR_ARG (exp, 0);
4578 tree arg2 = CALL_EXPR_ARG (exp, 1);
4580 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4581 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4583 /* If we don't have POINTER_TYPE, call the function. */
4584 if (arg1_align == 0 || arg2_align == 0)
4585 return NULL_RTX;
4587 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4588 arg1 = builtin_save_expr (arg1);
4589 arg2 = builtin_save_expr (arg2);
4591 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4592 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4594 /* Try to call cmpstrsi. */
4595 if (cmpstr_icode != CODE_FOR_nothing)
4596 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4597 MIN (arg1_align, arg2_align));
4599 /* Try to determine at least one length and call cmpstrnsi. */
4600 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4602 tree len;
4603 rtx arg3_rtx;
4605 tree len1 = c_strlen (arg1, 1);
4606 tree len2 = c_strlen (arg2, 1);
4608 if (len1)
4609 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4610 if (len2)
4611 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4613 /* If we don't have a constant length for the first, use the length
4614 of the second, if we know it. We don't require a constant for
4615 this case; some cost analysis could be done if both are available
4616 but neither is constant. For now, assume they're equally cheap,
4617 unless one has side effects. If both strings have constant lengths,
4618 use the smaller. */
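/* E.g. if len1 is the constant 3 and len2 is unknown, LEN becomes
   len1; if both are constants, the smaller is chosen by the chain
   below (illustration). */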
4620 if (!len1)
4621 len = len2;
4622 else if (!len2)
4623 len = len1;
4624 else if (TREE_SIDE_EFFECTS (len1))
4625 len = len2;
4626 else if (TREE_SIDE_EFFECTS (len2))
4627 len = len1;
4628 else if (TREE_CODE (len1) != INTEGER_CST)
4629 len = len2;
4630 else if (TREE_CODE (len2) != INTEGER_CST)
4631 len = len1;
4632 else if (tree_int_cst_lt (len1, len2))
4633 len = len1;
4634 else
4635 len = len2;
4637 /* If both arguments have side effects, we cannot optimize. */
4638 if (len && !TREE_SIDE_EFFECTS (len))
4640 arg3_rtx = expand_normal (len);
4641 result = expand_cmpstrn_or_cmpmem
4642 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4643 arg3_rtx, MIN (arg1_align, arg2_align));
4647 tree fndecl = get_callee_fndecl (exp);
4648 if (result)
4650 /* Check to see if the argument was declared attribute nonstring
4651 and if so, issue a warning since at this point it's not known
4652 to be nul-terminated. */
4653 maybe_warn_nonstring_arg (fndecl, exp);
4655 /* Return the value in the proper mode for this function. */
4656 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4657 if (GET_MODE (result) == mode)
4658 return result;
4659 if (target == 0)
4660 return convert_to_mode (mode, result, 0);
4661 convert_move (target, result, 0);
4662 return target;
4665 /* Expand the library call ourselves using a stabilized argument
4666 list to avoid evaluating the function's arguments twice. */
4667 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4668 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4669 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4670 return expand_call (fn, target, target == const0_rtx);
4673 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4674 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4675 the result in TARGET, if convenient. */
4677 static rtx
4678 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4679 ATTRIBUTE_UNUSED machine_mode mode)
4681 if (!validate_arglist (exp,
4682 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4683 return NULL_RTX;
4685 /* Due to the performance benefit, always inline the calls first. */
4686 rtx result = NULL_RTX;
4687 result = inline_expand_builtin_string_cmp (exp, target);
4688 if (result)
4689 return result;
4691 /* If c_strlen can determine an expression for one of the string
4692 lengths, and it doesn't have side effects, then emit cmpstrnsi
4693 using length MIN(strlen(string)+1, arg3). */
4694 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4695 if (cmpstrn_icode == CODE_FOR_nothing)
4696 return NULL_RTX;
4698 tree len;
4700 tree arg1 = CALL_EXPR_ARG (exp, 0);
4701 tree arg2 = CALL_EXPR_ARG (exp, 1);
4702 tree arg3 = CALL_EXPR_ARG (exp, 2);
4704 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4705 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4707 tree len1 = c_strlen (arg1, 1);
4708 tree len2 = c_strlen (arg2, 1);
4710 location_t loc = EXPR_LOCATION (exp);
4712 if (len1)
4713 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4714 if (len2)
4715 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4717 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4719 /* If we don't have a constant length for the first, use the length
4720 of the second, if we know it. If neither string is constant length,
4721 use the given length argument. We don't require a constant for
4722 this case; some cost analysis could be done if both are available
4723 but neither is constant. For now, assume they're equally cheap,
4724 unless one has side effects. If both strings have constant lengths,
4725 use the smaller. */
4727 if (!len1 && !len2)
4728 len = len3;
4729 else if (!len1)
4730 len = len2;
4731 else if (!len2)
4732 len = len1;
4733 else if (TREE_SIDE_EFFECTS (len1))
4734 len = len2;
4735 else if (TREE_SIDE_EFFECTS (len2))
4736 len = len1;
4737 else if (TREE_CODE (len1) != INTEGER_CST)
4738 len = len2;
4739 else if (TREE_CODE (len2) != INTEGER_CST)
4740 len = len1;
4741 else if (tree_int_cst_lt (len1, len2))
4742 len = len1;
4743 else
4744 len = len2;
4746 /* If we are not using the given length, we must incorporate it here.
4747 The actual new length parameter will be MIN(len,arg3) in this case. */
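/* E.g. for strncmp (s1, "ab", n), len2 is 3 (the string length
   including the nul), so the comparison length becomes MIN (3, n)
   (illustration). */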
4748 if (len != len3)
4749 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4750 rtx arg1_rtx = get_memory_rtx (arg1, len);
4751 rtx arg2_rtx = get_memory_rtx (arg2, len);
4752 rtx arg3_rtx = expand_normal (len);
4753 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4754 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4755 MIN (arg1_align, arg2_align));
4757 tree fndecl = get_callee_fndecl (exp);
4758 if (result)
4760 /* Check to see if the argument was declared attribute nonstring
4761 and if so, issue a warning since at this point it's not known
4762 to be nul-terminated. */
4763 maybe_warn_nonstring_arg (fndecl, exp);
4765 /* Return the value in the proper mode for this function. */
4766 mode = TYPE_MODE (TREE_TYPE (exp));
4767 if (GET_MODE (result) == mode)
4768 return result;
4769 if (target == 0)
4770 return convert_to_mode (mode, result, 0);
4771 convert_move (target, result, 0);
4772 return target;
4775 /* Expand the library call ourselves using a stabilized argument
4776 list to avoid evaluating the function's arguments twice. */
4777 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4778 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4779 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4780 return expand_call (fn, target, target == const0_rtx);
4783 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4784 if that's convenient. */
4786 rtx
4787 expand_builtin_saveregs (void)
4789 rtx val;
4790 rtx_insn *seq;
4792 /* Don't do __builtin_saveregs more than once in a function.
4793 Save the result of the first call and reuse it. */
4794 if (saveregs_value != 0)
4795 return saveregs_value;
4797 /* When this function is called, it means that registers must be
4798 saved on entry to this function. So we migrate the call to the
4799 first insn of this function. */
4801 start_sequence ();
4803 /* Do whatever the machine needs done in this case. */
4804 val = targetm.calls.expand_builtin_saveregs ();
4806 seq = get_insns ();
4807 end_sequence ();
4809 saveregs_value = val;
4811 /* Put the insns after the NOTE that starts the function. If this
4812 is inside a start_sequence, make the outer-level insn chain current, so
4813 the code is placed at the start of the function. */
4814 push_topmost_sequence ();
4815 emit_insn_after (seq, entry_of_function ());
4816 pop_topmost_sequence ();
4818 return val;
4821 /* Expand a call to __builtin_next_arg. */
4823 static rtx
4824 expand_builtin_next_arg (void)
4826 /* Checking arguments is already done in fold_builtin_next_arg
4827 which must be called before this function. */
4828 return expand_binop (ptr_mode, add_optab,
4829 crtl->args.internal_arg_pointer,
4830 crtl->args.arg_offset_rtx,
4831 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4834 /* Make it easier for the backends by protecting the valist argument
4835 from multiple evaluations. */
4837 static tree
4838 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4840 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4842 /* The current way of determining the type of valist is completely
4843 bogus. We should have the information on the va builtin instead. */
4844 if (!vatype)
4845 vatype = targetm.fn_abi_va_list (cfun->decl);
4847 if (TREE_CODE (vatype) == ARRAY_TYPE)
4849 if (TREE_SIDE_EFFECTS (valist))
4850 valist = save_expr (valist);
4852 /* For this case, the backends will be expecting a pointer to
4853 vatype, but it's possible we've actually been given an array
4854 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4855 So fix it. */
4856 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4858 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4859 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4862 else
4864 tree pt = build_pointer_type (vatype);
4866 if (! needs_lvalue)
4868 if (! TREE_SIDE_EFFECTS (valist))
4869 return valist;
4871 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4872 TREE_SIDE_EFFECTS (valist) = 1;
4875 if (TREE_SIDE_EFFECTS (valist))
4876 valist = save_expr (valist);
4877 valist = fold_build2_loc (loc, MEM_REF,
4878 vatype, valist, build_int_cst (pt, 0));
4881 return valist;
4884 /* The "standard" definition of va_list is void*. */
4886 tree
4887 std_build_builtin_va_list (void)
4889 return ptr_type_node;
4892 /* The "standard" abi va_list is va_list_type_node. */
4894 tree
4895 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4897 return va_list_type_node;
4900 /* The "standard" type of va_list is va_list_type_node. */
4902 tree
4903 std_canonical_va_list_type (tree type)
4905 tree wtype, htype;
4907 wtype = va_list_type_node;
4908 htype = type;
4910 if (TREE_CODE (wtype) == ARRAY_TYPE)
4912 /* If va_list is an array type, the argument may have decayed
4913 to a pointer type, e.g. by being passed to another function.
4914 In that case, unwrap both types so that we can compare the
4915 underlying records. */
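/* E.g. on ABIs where va_list is declared as an array of one
   structure, say `struct tag va_list[1]' (hypothetical layout), a
   va_list argument decays to `struct tag *', so both WTYPE and
   HTYPE are unwrapped to the element type before the comparison
   below. */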
4916 if (TREE_CODE (htype) == ARRAY_TYPE
4917 || POINTER_TYPE_P (htype))
4919 wtype = TREE_TYPE (wtype);
4920 htype = TREE_TYPE (htype);
4923 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4924 return va_list_type_node;
4926 return NULL_TREE;
4929 /* The "standard" implementation of va_start: just assign `nextarg' to
4930 the variable. */
4932 void
4933 std_expand_builtin_va_start (tree valist, rtx nextarg)
4935 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4936 convert_move (va_r, nextarg, 0);
4939 /* Expand EXP, a call to __builtin_va_start. */
4941 static rtx
4942 expand_builtin_va_start (tree exp)
4944 rtx nextarg;
4945 tree valist;
4946 location_t loc = EXPR_LOCATION (exp);
4948 if (call_expr_nargs (exp) < 2)
4950 error_at (loc, "too few arguments to function %<va_start%>");
4951 return const0_rtx;
4954 if (fold_builtin_next_arg (exp, true))
4955 return const0_rtx;
4957 nextarg = expand_builtin_next_arg ();
4958 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4960 if (targetm.expand_builtin_va_start)
4961 targetm.expand_builtin_va_start (valist, nextarg);
4962 else
4963 std_expand_builtin_va_start (valist, nextarg);
4965 return const0_rtx;
4968 /* Expand EXP, a call to __builtin_va_end. */
4970 static rtx
4971 expand_builtin_va_end (tree exp)
4973 tree valist = CALL_EXPR_ARG (exp, 0);
4975 /* Evaluate for side effects, if needed. I hate macros that don't
4976 do that. */
4977 if (TREE_SIDE_EFFECTS (valist))
4978 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4980 return const0_rtx;
4983 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4984 builtin rather than just as an assignment in stdarg.h because of the
4985 nastiness of array-type va_list types. */
4987 static rtx
4988 expand_builtin_va_copy (tree exp)
4990 tree dst, src, t;
4991 location_t loc = EXPR_LOCATION (exp);
4993 dst = CALL_EXPR_ARG (exp, 0);
4994 src = CALL_EXPR_ARG (exp, 1);
4996 dst = stabilize_va_list_loc (loc, dst, 1);
4997 src = stabilize_va_list_loc (loc, src, 0);
4999 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5001 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5003 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5004 TREE_SIDE_EFFECTS (t) = 1;
5005 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5007 else
5009 rtx dstb, srcb, size;
5011 /* Evaluate to pointers. */
5012 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5013 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5014 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5015 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5017 dstb = convert_memory_address (Pmode, dstb);
5018 srcb = convert_memory_address (Pmode, srcb);
5020 /* "Dereference" to BLKmode memories. */
5021 dstb = gen_rtx_MEM (BLKmode, dstb);
5022 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5023 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5024 srcb = gen_rtx_MEM (BLKmode, srcb);
5025 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5026 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5028 /* Copy. */
5029 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5032 return const0_rtx;
5035 /* Expand a call to one of the builtin functions __builtin_frame_address or
5036 __builtin_return_address. */
5038 static rtx
5039 expand_builtin_frame_address (tree fndecl, tree exp)
5041 /* The argument must be a nonnegative integer constant.
5042 It counts the number of frames to scan up the stack.
5043 The value is either the frame pointer value or the return
5044 address saved in that frame. */
5045 if (call_expr_nargs (exp) == 0)
5046 /* Warning about missing arg was already issued. */
5047 return const0_rtx;
5048 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5050 error ("invalid argument to %qD", fndecl);
5051 return const0_rtx;
5053 else
5055 /* Number of frames to scan up the stack. */
5056 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5058 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5060 /* Some ports cannot access arbitrary stack frames. */
5061 if (tem == NULL)
5063 warning (0, "unsupported argument to %qD", fndecl);
5064 return const0_rtx;
5067 if (count)
5069 /* Warn since no effort is made to ensure that any frame
5070 beyond the current one exists or can be safely reached. */
5071 warning (OPT_Wframe_address, "calling %qD with "
5072 "a nonzero argument is unsafe", fndecl);
5075 /* For __builtin_frame_address, return what we've got. */
5076 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5077 return tem;
5079 if (!REG_P (tem)
5080 && ! CONSTANT_P (tem))
5081 tem = copy_addr_to_reg (tem);
5082 return tem;
5086 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5087 failed and the caller should emit a normal call. */
5089 static rtx
5090 expand_builtin_alloca (tree exp)
5092 rtx op0;
5093 rtx result;
5094 unsigned int align;
5095 tree fndecl = get_callee_fndecl (exp);
5096 HOST_WIDE_INT max_size;
5097 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5098 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5099 bool valid_arglist
5100 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5101 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5102 VOID_TYPE)
5103 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5104 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5105 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5107 if (!valid_arglist)
5108 return NULL_RTX;
5110 if ((alloca_for_var
5111 && warn_vla_limit >= HOST_WIDE_INT_MAX
5112 && warn_alloc_size_limit < warn_vla_limit)
5113 || (!alloca_for_var
5114 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5115 && warn_alloc_size_limit < warn_alloca_limit
5118 /* -Walloca-larger-than and -Wvla-larger-than settings of
5119 less than HOST_WIDE_INT_MAX override the more general
5120 -Walloc-size-larger-than so unless either of the former
5121 options is smaller than the last one (which would imply
5122 that the call was already checked), check the alloca
5123 arguments for overflow. */
5124 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5125 int idx[] = { 0, -1 };
5126 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5129 /* Compute the argument. */
5130 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5132 /* Compute the alignment. */
5133 align = (fcode == BUILT_IN_ALLOCA
5134 ? BIGGEST_ALIGNMENT
5135 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5137 /* Compute the maximum size. */
5138 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5139 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5140 : -1);
5142 /* Allocate the desired space. If the allocation stems from the declaration
5143 of a variable-sized object, it cannot accumulate. */
5144 result
5145 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5146 result = convert_memory_address (ptr_mode, result);
5148 return result;
5151 /* Expand EXP, a call to __asan_allocas_unpoison. Add to its second
5152 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5153 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
5154 before the handle_builtin_stack_restore function. */
5156 static rtx
5157 expand_asan_emit_allocas_unpoison (tree exp)
5159 tree arg0 = CALL_EXPR_ARG (exp, 0);
5160 tree arg1 = CALL_EXPR_ARG (exp, 1);
5161 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5162 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5163 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5164 stack_pointer_rtx, NULL_RTX, 0,
5165 OPTAB_LIB_WIDEN);
5166 off = convert_modes (ptr_mode, Pmode, off, 0);
5167 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5168 OPTAB_LIB_WIDEN);
5169 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5170 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5171 top, ptr_mode, bot, ptr_mode);
5172 return ret;
5175 /* Expand a call to bswap builtin in EXP.
5176 Return NULL_RTX if a normal call should be emitted rather than expanding the
5177 function in-line. If convenient, the result should be placed in TARGET.
5178 SUBTARGET may be used as the target for computing one of EXP's operands. */
5180 static rtx
5181 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5182 rtx subtarget)
5184 tree arg;
5185 rtx op0;
5187 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5188 return NULL_RTX;
5190 arg = CALL_EXPR_ARG (exp, 0);
5191 op0 = expand_expr (arg,
5192 subtarget && GET_MODE (subtarget) == target_mode
5193 ? subtarget : NULL_RTX,
5194 target_mode, EXPAND_NORMAL);
5195 if (GET_MODE (op0) != target_mode)
5196 op0 = convert_to_mode (target_mode, op0, 1);
5198 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5200 gcc_assert (target);
5202 return convert_to_mode (target_mode, target, 1);
5205 /* Expand a call to a unary builtin in EXP.
5206 Return NULL_RTX if a normal call should be emitted rather than expanding the
5207 function in-line. If convenient, the result should be placed in TARGET.
5208 SUBTARGET may be used as the target for computing one of EXP's operands. */
5210 static rtx
5211 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5212 rtx subtarget, optab op_optab)
5214 rtx op0;
5216 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5217 return NULL_RTX;
5219 /* Compute the argument. */
5220 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5221 (subtarget
5222 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5223 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5224 VOIDmode, EXPAND_NORMAL);
5225 /* Compute op, into TARGET if possible.
5226 Set TARGET to wherever the result comes back. */
5227 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5228 op_optab, op0, target, op_optab != clrsb_optab);
5229 gcc_assert (target);
5231 return convert_to_mode (target_mode, target, 0);
5234 /* Expand a call to __builtin_expect. We just return our argument
5235 as the builtin_expect semantics should already have been applied by
5236 the tree branch prediction pass. */
5238 static rtx
5239 expand_builtin_expect (tree exp, rtx target)
5241 tree arg;
5243 if (call_expr_nargs (exp) < 2)
5244 return const0_rtx;
5245 arg = CALL_EXPR_ARG (exp, 0);
5247 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5248 /* When guessing was done, the hints should be already stripped away. */
5249 gcc_assert (!flag_guess_branch_prob
5250 || optimize == 0 || seen_error ());
5251 return target;
5254 /* Expand a call to __builtin_assume_aligned. We just return our first
5255 argument as the builtin_assume_aligned semantics should already have been
5256 executed by CCP. */
5258 static rtx
5259 expand_builtin_assume_aligned (tree exp, rtx target)
5261 if (call_expr_nargs (exp) < 2)
5262 return const0_rtx;
5263 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5264 EXPAND_NORMAL);
5265 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5266 && (call_expr_nargs (exp) < 3
5267 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5268 return target;
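/* A minimal usage sketch (illustrative only; BUF is a hypothetical
   buffer):

     double *p = (double *) __builtin_assume_aligned (buf, 32);

   CCP has already folded the alignment fact into the IL, so the
   expansion above merely forwards the first argument.  */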
5271 void
5272 expand_builtin_trap (void)
5274 if (targetm.have_trap ())
5276 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5277 /* For trap insns when not accumulating outgoing args force
5278 REG_ARGS_SIZE note to prevent crossjumping of calls with
5279 different args sizes. */
5280 if (!ACCUMULATE_OUTGOING_ARGS)
5281 add_args_size_note (insn, stack_pointer_delta);
5283 else
5285 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5286 tree call_expr = build_call_expr (fn, 0);
5287 expand_call (call_expr, NULL_RTX, false);
5290 emit_barrier ();
5293 /* Expand a call to __builtin_unreachable. We do nothing except emit
5294 a barrier saying that control flow will not pass here.
5296 It is the responsibility of the program being compiled to ensure
5297 that control flow never reaches __builtin_unreachable. */
5298 static void
5299 expand_builtin_unreachable (void)
5301 emit_barrier ();
5304 /* Expand EXP, a call to fabs, fabsf or fabsl.
5305 Return NULL_RTX if a normal call should be emitted rather than expanding
5306 the function inline. If convenient, the result should be placed
5307 in TARGET. SUBTARGET may be used as the target for computing
5308 the operand. */
5310 static rtx
5311 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5313 machine_mode mode;
5314 tree arg;
5315 rtx op0;
5317 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5318 return NULL_RTX;
5320 arg = CALL_EXPR_ARG (exp, 0);
5321 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5322 mode = TYPE_MODE (TREE_TYPE (arg));
5323 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5324 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5327 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5328 Return NULL if a normal call should be emitted rather than expanding the
5329 function inline. If convenient, the result should be placed in TARGET.
5330 SUBTARGET may be used as the target for computing the operand. */
5332 static rtx
5333 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5335 rtx op0, op1;
5336 tree arg;
5338 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5339 return NULL_RTX;
5341 arg = CALL_EXPR_ARG (exp, 0);
5342 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5344 arg = CALL_EXPR_ARG (exp, 1);
5345 op1 = expand_normal (arg);
5347 return expand_copysign (op0, op1, target);
5350 /* Expand a call to __builtin___clear_cache. */
5352 static rtx
5353 expand_builtin___clear_cache (tree exp)
5355 if (!targetm.code_for_clear_cache)
5357 #ifdef CLEAR_INSN_CACHE
5358 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5359 does something. Just do the default expansion to a call to
5360 __clear_cache(). */
5361 return NULL_RTX;
5362 #else
5363 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5364 does nothing. There is no need to call it. Do nothing. */
5365 return const0_rtx;
5366 #endif /* CLEAR_INSN_CACHE */
5369 /* We have a "clear_cache" insn, and it will handle everything. */
5370 tree begin, end;
5371 rtx begin_rtx, end_rtx;
5373 /* We must not expand to a library call. If we did, any
5374 fallback library function in libgcc that might contain a call to
5375 __builtin___clear_cache() would recurse infinitely. */
5376 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5378 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5379 return const0_rtx;
5382 if (targetm.have_clear_cache ())
5384 struct expand_operand ops[2];
5386 begin = CALL_EXPR_ARG (exp, 0);
5387 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5389 end = CALL_EXPR_ARG (exp, 1);
5390 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5392 create_address_operand (&ops[0], begin_rtx);
5393 create_address_operand (&ops[1], end_rtx);
5394 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5395 return const0_rtx;
5397 return const0_rtx;
5400 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5402 static rtx
5403 round_trampoline_addr (rtx tramp)
5405 rtx temp, addend, mask;
5407 /* If we don't need too much alignment, we'll have been guaranteed
5408 proper alignment by get_trampoline_type. */
5409 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5410 return tramp;
5412 /* Round address up to desired boundary. */
5413 temp = gen_reg_rtx (Pmode);
5414 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5415 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5417 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5418 temp, 0, OPTAB_LIB_WIDEN);
5419 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5420 temp, 0, OPTAB_LIB_WIDEN);
5422 return tramp;
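/* Worked example of the rounding above (illustrative): with
   TRAMPOLINE_ALIGNMENT == 64 bits, ADDEND is 7 and MASK is -8, so a
   trampoline address of 0x1003 becomes (0x1003 + 7) & -8 == 0x1008,
   the next 8-byte boundary.  */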
5425 static rtx
5426 expand_builtin_init_trampoline (tree exp, bool onstack)
5428 tree t_tramp, t_func, t_chain;
5429 rtx m_tramp, r_tramp, r_chain, tmp;
5431 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5432 POINTER_TYPE, VOID_TYPE))
5433 return NULL_RTX;
5435 t_tramp = CALL_EXPR_ARG (exp, 0);
5436 t_func = CALL_EXPR_ARG (exp, 1);
5437 t_chain = CALL_EXPR_ARG (exp, 2);
5439 r_tramp = expand_normal (t_tramp);
5440 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5441 MEM_NOTRAP_P (m_tramp) = 1;
5443 /* If ONSTACK, the TRAMP argument should be the address of a field
5444 within the local function's FRAME decl. Either way, let's see if
5445 we can fill in the MEM_ATTRs for this memory. */
5446 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5447 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5449 /* Creator of a heap trampoline is responsible for making sure the
5450 address is aligned to at least STACK_BOUNDARY. Normally malloc
5451 will ensure this anyhow. */
5452 tmp = round_trampoline_addr (r_tramp);
5453 if (tmp != r_tramp)
5455 m_tramp = change_address (m_tramp, BLKmode, tmp);
5456 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5457 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5460 /* The FUNC argument should be the address of the nested function.
5461 Extract the actual function decl to pass to the hook. */
5462 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5463 t_func = TREE_OPERAND (t_func, 0);
5464 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5466 r_chain = expand_normal (t_chain);
5468 /* Generate insns to initialize the trampoline. */
5469 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5471 if (onstack)
5473 trampolines_created = 1;
5475 if (targetm.calls.custom_function_descriptors != 0)
5476 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5477 "trampoline generated for nested function %qD", t_func);
5480 return const0_rtx;
5483 static rtx
5484 expand_builtin_adjust_trampoline (tree exp)
5486 rtx tramp;
5488 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5489 return NULL_RTX;
5491 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5492 tramp = round_trampoline_addr (tramp);
5493 if (targetm.calls.trampoline_adjust_address)
5494 tramp = targetm.calls.trampoline_adjust_address (tramp);
5496 return tramp;
5499 /* Expand a call to the builtin descriptor initialization routine.
5500 A descriptor is made up of a couple of pointers to the static
5501 chain and the code entry in this order. */
5503 static rtx
5504 expand_builtin_init_descriptor (tree exp)
5506 tree t_descr, t_func, t_chain;
5507 rtx m_descr, r_descr, r_func, r_chain;
5509 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5510 VOID_TYPE))
5511 return NULL_RTX;
5513 t_descr = CALL_EXPR_ARG (exp, 0);
5514 t_func = CALL_EXPR_ARG (exp, 1);
5515 t_chain = CALL_EXPR_ARG (exp, 2);
5517 r_descr = expand_normal (t_descr);
5518 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5519 MEM_NOTRAP_P (m_descr) = 1;
5521 r_func = expand_normal (t_func);
5522 r_chain = expand_normal (t_chain);
5524 /* Generate insns to initialize the descriptor. */
5525 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5526 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5527 POINTER_SIZE / BITS_PER_UNIT), r_func);
5529 return const0_rtx;
5532 /* Expand a call to the builtin descriptor adjustment routine. */
5534 static rtx
5535 expand_builtin_adjust_descriptor (tree exp)
5537 rtx tramp;
5539 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5540 return NULL_RTX;
5542 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5544 /* Unalign the descriptor to allow runtime identification. */
5545 tramp = plus_constant (ptr_mode, tramp,
5546 targetm.calls.custom_function_descriptors);
5548 return force_operand (tramp, NULL_RTX);
5551 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5552 function. The function first checks whether the back end provides
5553 an insn to implement signbit for the respective mode. If not, it
5554 checks whether the floating point format of the value is such that
5555 the sign bit can be extracted. If that is not the case, error out.
5556 EXP is the expression that is a call to the builtin function; if
5557 convenient, the result should be placed in TARGET. */
5558 static rtx
5559 expand_builtin_signbit (tree exp, rtx target)
5561 const struct real_format *fmt;
5562 scalar_float_mode fmode;
5563 scalar_int_mode rmode, imode;
5564 tree arg;
5565 int word, bitpos;
5566 enum insn_code icode;
5567 rtx temp;
5568 location_t loc = EXPR_LOCATION (exp);
5570 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5571 return NULL_RTX;
5573 arg = CALL_EXPR_ARG (exp, 0);
5574 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5575 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5576 fmt = REAL_MODE_FORMAT (fmode);
5578 arg = builtin_save_expr (arg);
5580 /* Expand the argument yielding a RTX expression. */
5581 temp = expand_normal (arg);
5583 /* Check if the back end provides an insn that handles signbit for the
5584 argument's mode. */
5585 icode = optab_handler (signbit_optab, fmode);
5586 if (icode != CODE_FOR_nothing)
5588 rtx_insn *last = get_last_insn ();
5589 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5590 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5591 return target;
5592 delete_insns_since (last);
5595 /* For floating point formats without a sign bit, implement signbit
5596 as "ARG < 0.0". */
5597 bitpos = fmt->signbit_ro;
5598 if (bitpos < 0)
5600 /* But we can't do this if the format supports signed zero. */
5601 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5603 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5604 build_real (TREE_TYPE (arg), dconst0));
5605 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5608 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5610 imode = int_mode_for_mode (fmode).require ();
5611 temp = gen_lowpart (imode, temp);
5613 else
5615 imode = word_mode;
5616 /* Handle targets with different FP word orders. */
5617 if (FLOAT_WORDS_BIG_ENDIAN)
5618 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5619 else
5620 word = bitpos / BITS_PER_WORD;
5621 temp = operand_subword_force (temp, word, fmode);
5622 bitpos = bitpos % BITS_PER_WORD;
5625 /* Force the intermediate word_mode (or narrower) result into a
5626 register. This avoids attempting to create paradoxical SUBREGs
5627 of floating point modes below. */
5628 temp = force_reg (imode, temp);
5630 /* If the bitpos is within the "result mode" lowpart, the operation
5631 can be implemented with a single bitwise AND. Otherwise, we need
5632 a right shift and an AND. */
5634 if (bitpos < GET_MODE_BITSIZE (rmode))
5636 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5638 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5639 temp = gen_lowpart (rmode, temp);
5640 temp = expand_binop (rmode, and_optab, temp,
5641 immed_wide_int_const (mask, rmode),
5642 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5644 else
5646 /* Perform a logical right shift to place the signbit in the least
5647 significant bit, then truncate the result to the desired mode
5648 and mask just this bit. */
5649 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5650 temp = gen_lowpart (rmode, temp);
5651 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5652 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5655 return temp;
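/* Worked example (illustrative): for IEEE double, signbit_ro is 63.
   With a 32-bit word size the high word is selected first (subject to
   FLOAT_WORDS_BIG_ENDIAN) and bit 31 of it is extracted; on a 64-bit
   target BITPOS stays 63, which exceeds the bitsize of the int result
   mode, so the shift-then-mask path above is used.  */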
5658 /* Expand fork or exec calls. TARGET is the desired target of the
5659 call. EXP is the call. FN is the
5660 identifier of the actual function. IGNORE is nonzero if the
5661 value is to be ignored. */
5663 static rtx
5664 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5666 tree id, decl;
5667 tree call;
5669 /* If we are not profiling, just call the function. */
5670 if (!profile_arc_flag)
5671 return NULL_RTX;
5673 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5674 compiler, so the code does not diverge, and the wrapper may run the
5675 code necessary for keeping the profiling sane. */
5677 switch (DECL_FUNCTION_CODE (fn))
5679 case BUILT_IN_FORK:
5680 id = get_identifier ("__gcov_fork");
5681 break;
5683 case BUILT_IN_EXECL:
5684 id = get_identifier ("__gcov_execl");
5685 break;
5687 case BUILT_IN_EXECV:
5688 id = get_identifier ("__gcov_execv");
5689 break;
5691 case BUILT_IN_EXECLP:
5692 id = get_identifier ("__gcov_execlp");
5693 break;
5695 case BUILT_IN_EXECLE:
5696 id = get_identifier ("__gcov_execle");
5697 break;
5699 case BUILT_IN_EXECVP:
5700 id = get_identifier ("__gcov_execvp");
5701 break;
5703 case BUILT_IN_EXECVE:
5704 id = get_identifier ("__gcov_execve");
5705 break;
5707 default:
5708 gcc_unreachable ();
5711 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5712 FUNCTION_DECL, id, TREE_TYPE (fn));
5713 DECL_EXTERNAL (decl) = 1;
5714 TREE_PUBLIC (decl) = 1;
5715 DECL_ARTIFICIAL (decl) = 1;
5716 TREE_NOTHROW (decl) = 1;
5717 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5718 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5719 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5720 return expand_call (call, target, ignore);
5725 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5726 the pointer in these functions is void*, the tree optimizers may remove
5727 casts. The mode computed in expand_builtin isn't reliable either, due
5728 to __sync_bool_compare_and_swap.
5730 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5731 group of builtins. This gives us log2 of the mode size. */
5733 static inline machine_mode
5734 get_builtin_sync_mode (int fcode_diff)
5736 /* The size is not negotiable, so ask not to get BLKmode in return
5737 if the target indicates that a smaller size would be better. */
5738 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
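/* Worked example (illustrative): for BUILT_IN_SYNC_FETCH_AND_ADD_4,
   FCODE_DIFF relative to the _1 variant is 2, giving a mode of
   8 << 2 == 32 bits, i.e. SImode on typical targets.  */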
5741 /* Expand the memory expression LOC and return the appropriate memory operand
5742 for the builtin_sync operations. */
5744 static rtx
5745 get_builtin_sync_mem (tree loc, machine_mode mode)
5747 rtx addr, mem;
5749 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5750 addr = convert_memory_address (Pmode, addr);
5752 /* Note that we explicitly do not want any alias information for this
5753 memory, so that we kill all other live memories. Otherwise we don't
5754 satisfy the full barrier semantics of the intrinsic. */
5755 mem = validize_mem (gen_rtx_MEM (mode, addr));
5757 /* The alignment needs to be at least that of the mode. */
5758 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5759 get_pointer_alignment (loc)));
5760 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5761 MEM_VOLATILE_P (mem) = 1;
5763 return mem;
5766 /* Make sure an argument is in the right mode.
5767 EXP is the tree argument.
5768 MODE is the mode it should be in. */
5770 static rtx
5771 expand_expr_force_mode (tree exp, machine_mode mode)
5773 rtx val;
5774 machine_mode old_mode;
5776 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5777 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5778 of CONST_INTs, where we know the old_mode only from the call argument. */
5780 old_mode = GET_MODE (val);
5781 if (old_mode == VOIDmode)
5782 old_mode = TYPE_MODE (TREE_TYPE (exp));
5783 val = convert_modes (mode, old_mode, val, 1);
5784 return val;
5788 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5789 EXP is the CALL_EXPR. CODE is the rtx code
5790 that corresponds to the arithmetic or logical operation from the name;
5791 an exception here is that NOT actually means NAND. TARGET is an optional
5792 place for us to store the results; AFTER is true if this is the
5793 fetch_and_xxx form. */
5795 static rtx
5796 expand_builtin_sync_operation (machine_mode mode, tree exp,
5797 enum rtx_code code, bool after,
5798 rtx target)
5800 rtx val, mem;
5801 location_t loc = EXPR_LOCATION (exp);
5803 if (code == NOT && warn_sync_nand)
5805 tree fndecl = get_callee_fndecl (exp);
5806 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5808 static bool warned_f_a_n, warned_n_a_f;
5810 switch (fcode)
5812 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5813 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5814 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5815 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5816 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5817 if (warned_f_a_n)
5818 break;
5820 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5821 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5822 warned_f_a_n = true;
5823 break;
5825 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5826 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5827 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5828 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5829 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5830 if (warned_n_a_f)
5831 break;
5833 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5834 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5835 warned_n_a_f = true;
5836 break;
5838 default:
5839 gcc_unreachable ();
5843 /* Expand the operands. */
5844 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5845 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5847 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5848 after);
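/* Illustrative semantics (not part of the original file), using the
   GCC >= 4.4 meaning of NAND that the warning above refers to:

     int v = 0xF0;
     int old = __sync_fetch_and_nand (&v, 0x3C);

   leaves old == 0xF0 and v == ~(0xF0 & 0x3C) == ~0x30.  */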
5851 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5852 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5853 true if this is the boolean form. TARGET is a place for us to store the
5854 results; this is NOT optional if IS_BOOL is true. */
5856 static rtx
5857 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5858 bool is_bool, rtx target)
5860 rtx old_val, new_val, mem;
5861 rtx *pbool, *poval;
5863 /* Expand the operands. */
5864 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5865 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5866 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5868 pbool = poval = NULL;
5869 if (target != const0_rtx)
5871 if (is_bool)
5872 pbool = &target;
5873 else
5874 poval = &target;
5876 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5877 false, MEMMODEL_SYNC_SEQ_CST,
5878 MEMMODEL_SYNC_SEQ_CST))
5879 return NULL_RTX;
5881 return target;
5884 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5885 general form is actually an atomic exchange, and some targets only
5886 support a reduced form with the second argument being a constant 1.
5887 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5888 the results. */
5890 static rtx
5891 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5892 rtx target)
5894 rtx val, mem;
5896 /* Expand the operands. */
5897 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5898 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5900 return expand_sync_lock_test_and_set (target, mem, val);
5903 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5905 static void
5906 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5908 rtx mem;
5910 /* Expand the operands. */
5911 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5913 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5916 /* Given an integer representing an ``enum memmodel'', verify its
5917 correctness and return the memory model enum. */
5919 static enum memmodel
5920 get_memmodel (tree exp)
5922 rtx op;
5923 unsigned HOST_WIDE_INT val;
5924 source_location loc
5925 = expansion_point_location_if_in_system_header (input_location);
5927 /* If the parameter is not a constant, it's a run time value so we'll just
5928 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5929 if (TREE_CODE (exp) != INTEGER_CST)
5930 return MEMMODEL_SEQ_CST;
5932 op = expand_normal (exp);
5934 val = INTVAL (op);
5935 if (targetm.memmodel_check)
5936 val = targetm.memmodel_check (val);
5937 else if (val & ~MEMMODEL_MASK)
5939 warning_at (loc, OPT_Winvalid_memory_model,
5940 "unknown architecture specifier in memory model to builtin");
5941 return MEMMODEL_SEQ_CST;
5944 /* Should never see a user-explicit SYNC memory model, so >= LAST works. */
5945 if (memmodel_base (val) >= MEMMODEL_LAST)
5947 warning_at (loc, OPT_Winvalid_memory_model,
5948 "invalid memory model argument to builtin");
5949 return MEMMODEL_SEQ_CST;
5952 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5953 be conservative and promote consume to acquire. */
5954 if (val == MEMMODEL_CONSUME)
5955 val = MEMMODEL_ACQUIRE;
5957 return (enum memmodel) val;
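/* For reference (illustrative; these are the values of the __ATOMIC_*
   macros): RELAXED == 0, CONSUME == 1, ACQUIRE == 2, RELEASE == 3,
   ACQ_REL == 4, SEQ_CST == 5.  A call such as

     __atomic_load_n (&x, __ATOMIC_CONSUME);

   therefore reaches this point with the constant 1, which the code
   above promotes to ACQUIRE.  */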
5960 /* Expand the __atomic_exchange intrinsic:
5961 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5962 EXP is the CALL_EXPR.
5963 TARGET is an optional place for us to store the results. */
5965 static rtx
5966 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5968 rtx val, mem;
5969 enum memmodel model;
5971 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5973 if (!flag_inline_atomics)
5974 return NULL_RTX;
5976 /* Expand the operands. */
5977 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5978 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5980 return expand_atomic_exchange (target, mem, val, model);
5983 /* Expand the __atomic_compare_exchange intrinsic:
5984 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5985 TYPE desired, BOOL weak,
5986 enum memmodel success,
5987 enum memmodel failure)
5988 EXP is the CALL_EXPR.
5989 TARGET is an optional place for us to store the results. */
5991 static rtx
5992 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5993 rtx target)
5995 rtx expect, desired, mem, oldval;
5996 rtx_code_label *label;
5997 enum memmodel success, failure;
5998 tree weak;
5999 bool is_weak;
6000 source_location loc
6001 = expansion_point_location_if_in_system_header (input_location);
6003 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6004 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6006 if (failure > success)
6008 warning_at (loc, OPT_Winvalid_memory_model,
6009 "failure memory model cannot be stronger than success "
6010 "memory model for %<__atomic_compare_exchange%>");
6011 success = MEMMODEL_SEQ_CST;
6014 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6016 warning_at (loc, OPT_Winvalid_memory_model,
6017 "invalid failure memory model for "
6018 "%<__atomic_compare_exchange%>");
6019 failure = MEMMODEL_SEQ_CST;
6020 success = MEMMODEL_SEQ_CST;
6024 if (!flag_inline_atomics)
6025 return NULL_RTX;
6027 /* Expand the operands. */
6028 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6030 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6031 expect = convert_memory_address (Pmode, expect);
6032 expect = gen_rtx_MEM (mode, expect);
6033 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6035 weak = CALL_EXPR_ARG (exp, 3);
6036 is_weak = false;
6037 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6038 is_weak = true;
6040 if (target == const0_rtx)
6041 target = NULL;
6043 /* Lest the rtl backend create a race condition with an improper store
6044 to memory, always create a new pseudo for OLDVAL. */
6045 oldval = NULL;
6047 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6048 is_weak, success, failure))
6049 return NULL_RTX;
6051 /* Conditionally store back to EXPECT, lest we create a race condition
6052 with an improper store to memory. */
6053 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6054 the normal case where EXPECT is totally private, i.e. a register. At
6055 which point the store can be unconditional. */
6056 label = gen_label_rtx ();
6057 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6058 GET_MODE (target), 1, label);
6059 emit_move_insn (expect, oldval);
6060 emit_label (label);
6062 return target;
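/* A minimal usage sketch (illustrative only; the fourth argument is
   the WEAK flag):

     int expected = 0;
     bool ok = __atomic_compare_exchange_n (&v, &expected, 1, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   On failure OK is false and EXPECTED now holds the current value of
   V, via the conditional store-back emitted above.  */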
6065 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6066 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6067 call. The weak parameter must be dropped to match the expected parameter
6068 list and the expected argument changed from value to pointer to memory
6069 slot. */
6071 static void
6072 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6074 unsigned int z;
6075 vec<tree, va_gc> *vec;
6077 vec_alloc (vec, 5);
6078 vec->quick_push (gimple_call_arg (call, 0));
6079 tree expected = gimple_call_arg (call, 1);
6080 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6081 TREE_TYPE (expected));
6082 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6083 if (expd != x)
6084 emit_move_insn (x, expd);
6085 tree v = make_tree (TREE_TYPE (expected), x);
6086 vec->quick_push (build1 (ADDR_EXPR,
6087 build_pointer_type (TREE_TYPE (expected)), v));
6088 vec->quick_push (gimple_call_arg (call, 2));
6089 /* Skip the boolean weak parameter. */
6090 for (z = 4; z < 6; z++)
6091 vec->quick_push (gimple_call_arg (call, z));
6092 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6093 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6094 gcc_assert (bytes_log2 < 5);
6095 built_in_function fncode
6096 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6097 + bytes_log2);
6098 tree fndecl = builtin_decl_explicit (fncode);
6099 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6100 fndecl);
6101 tree exp = build_call_vec (boolean_type_node, fn, vec);
6102 tree lhs = gimple_call_lhs (call);
6103 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6104 if (lhs)
6106 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6107 if (GET_MODE (boolret) != mode)
6108 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6109 x = force_reg (mode, x);
6110 write_complex_part (target, boolret, true);
6111 write_complex_part (target, x, false);
6115 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6117 void
6118 expand_ifn_atomic_compare_exchange (gcall *call)
6120 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6121 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6122 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6123 rtx expect, desired, mem, oldval, boolret;
6124 enum memmodel success, failure;
6125 tree lhs;
6126 bool is_weak;
6127 source_location loc
6128 = expansion_point_location_if_in_system_header (gimple_location (call));
6130 success = get_memmodel (gimple_call_arg (call, 4));
6131 failure = get_memmodel (gimple_call_arg (call, 5));
6133 if (failure > success)
6135 warning_at (loc, OPT_Winvalid_memory_model,
6136 "failure memory model cannot be stronger than success "
6137 "memory model for %<__atomic_compare_exchange%>");
6138 success = MEMMODEL_SEQ_CST;
6141 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6143 warning_at (loc, OPT_Winvalid_memory_model,
6144 "invalid failure memory model for "
6145 "%<__atomic_compare_exchange%>");
6146 failure = MEMMODEL_SEQ_CST;
6147 success = MEMMODEL_SEQ_CST;
6150 if (!flag_inline_atomics)
6152 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6153 return;
6156 /* Expand the operands. */
6157 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6159 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6160 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6162 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6164 boolret = NULL;
6165 oldval = NULL;
6167 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6168 is_weak, success, failure))
6170 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6171 return;
6174 lhs = gimple_call_lhs (call);
6175 if (lhs)
6177 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6178 if (GET_MODE (boolret) != mode)
6179 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6180 write_complex_part (target, boolret, true);
6181 write_complex_part (target, oldval, false);
6185 /* Expand the __atomic_load intrinsic:
6186 TYPE __atomic_load (TYPE *object, enum memmodel)
6187 EXP is the CALL_EXPR.
6188 TARGET is an optional place for us to store the results. */
6190 static rtx
6191 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6193 rtx mem;
6194 enum memmodel model;
6196 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6197 if (is_mm_release (model) || is_mm_acq_rel (model))
6199 source_location loc
6200 = expansion_point_location_if_in_system_header (input_location);
6201 warning_at (loc, OPT_Winvalid_memory_model,
6202 "invalid memory model for %<__atomic_load%>");
6203 model = MEMMODEL_SEQ_CST;
6206 if (!flag_inline_atomics)
6207 return NULL_RTX;
6209 /* Expand the operand. */
6210 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6212 return expand_atomic_load (target, mem, model);
6216 /* Expand the __atomic_store intrinsic:
6217 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6218 EXP is the CALL_EXPR.
6219 TARGET is an optional place for us to store the results. */
6221 static rtx
6222 expand_builtin_atomic_store (machine_mode mode, tree exp)
6224 rtx mem, val;
6225 enum memmodel model;
6227 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6228 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6229 || is_mm_release (model)))
6231 source_location loc
6232 = expansion_point_location_if_in_system_header (input_location);
6233 warning_at (loc, OPT_Winvalid_memory_model,
6234 "invalid memory model for %<__atomic_store%>");
6235 model = MEMMODEL_SEQ_CST;
6238 if (!flag_inline_atomics)
6239 return NULL_RTX;
6241 /* Expand the operands. */
6242 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6243 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6245 return expand_atomic_store (mem, val, model, false);
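/* A minimal usage sketch (illustrative only); RELAXED, RELEASE and
   SEQ_CST are the only memory models the check above accepts:

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);  */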
6248 /* Expand the __atomic_fetch_XXX intrinsic:
6249 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6250 EXP is the CALL_EXPR.
6251 TARGET is an optional place for us to store the results.
6252 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND).
6253 FETCH_AFTER is true if the result of the operation is returned,
6254 and false if the value before the operation is returned.
6255 IGNORE is true if the result is not used.
6256 EXT_CALL is the correct builtin for an external call if this cannot be
6257 resolved to an instruction sequence. */
6259 static rtx
6260 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6261 enum rtx_code code, bool fetch_after,
6262 bool ignore, enum built_in_function ext_call)
6264 rtx val, mem, ret;
6265 enum memmodel model;
6266 tree fndecl;
6267 tree addr;
6269 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6271 /* Expand the operands. */
6272 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6273 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6275 /* Only try generating instructions if inlining is turned on. */
6276 if (flag_inline_atomics)
6278 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6279 if (ret)
6280 return ret;
6283 /* Return if a different routine isn't needed for the library call. */
6284 if (ext_call == BUILT_IN_NONE)
6285 return NULL_RTX;
6287 /* Change the call to the specified function. */
6288 fndecl = get_callee_fndecl (exp);
6289 addr = CALL_EXPR_FN (exp);
6290 STRIP_NOPS (addr);
6292 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6293 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6295 /* If we will emit code after the call, the call cannot be a tail call.
6296 If it is emitted as a tail call, a barrier is emitted after it, and
6297 then all trailing code is removed. */
6298 if (!ignore)
6299 CALL_EXPR_TAILCALL (exp) = 0;
6301 /* Expand the call here so we can emit trailing code. */
6302 ret = expand_call (exp, target, ignore);
6304 /* Replace the original function just in case it matters. */
6305 TREE_OPERAND (addr, 0) = fndecl;
6307 /* Then issue the arithmetic correction to return the right result. */
6308 if (!ignore)
6310 if (code == NOT)
6312 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6313 OPTAB_LIB_WIDEN);
6314 ret = expand_simple_unop (mode, NOT, ret, target, true);
6316 else
6317 ret = expand_simple_binop (mode, code, ret, val, target, true,
6318 OPTAB_LIB_WIDEN);
6320 return ret;
6323 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6325 void
6326 expand_ifn_atomic_bit_test_and (gcall *call)
6328 tree ptr = gimple_call_arg (call, 0);
6329 tree bit = gimple_call_arg (call, 1);
6330 tree flag = gimple_call_arg (call, 2);
6331 tree lhs = gimple_call_lhs (call);
6332 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6333 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6334 enum rtx_code code;
6335 optab optab;
6336 struct expand_operand ops[5];
6338 gcc_assert (flag_inline_atomics);
6340 if (gimple_call_num_args (call) == 4)
6341 model = get_memmodel (gimple_call_arg (call, 3));
6343 rtx mem = get_builtin_sync_mem (ptr, mode);
6344 rtx val = expand_expr_force_mode (bit, mode);
6346 switch (gimple_call_internal_fn (call))
6348 case IFN_ATOMIC_BIT_TEST_AND_SET:
6349 code = IOR;
6350 optab = atomic_bit_test_and_set_optab;
6351 break;
6352 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6353 code = XOR;
6354 optab = atomic_bit_test_and_complement_optab;
6355 break;
6356 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6357 code = AND;
6358 optab = atomic_bit_test_and_reset_optab;
6359 break;
6360 default:
6361 gcc_unreachable ();
6364 if (lhs == NULL_TREE)
6366 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6367 val, NULL_RTX, true, OPTAB_DIRECT);
6368 if (code == AND)
6369 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6370 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6371 return;
6374 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6375 enum insn_code icode = direct_optab_handler (optab, mode);
6376 gcc_assert (icode != CODE_FOR_nothing);
6377 create_output_operand (&ops[0], target, mode);
6378 create_fixed_operand (&ops[1], mem);
6379 create_convert_operand_to (&ops[2], val, mode, true);
6380 create_integer_operand (&ops[3], model);
6381 create_integer_operand (&ops[4], integer_onep (flag));
6382 if (maybe_expand_insn (icode, 5, ops))
6383 return;
6385 rtx bitval = val;
6386 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6387 val, NULL_RTX, true, OPTAB_DIRECT);
6388 rtx maskval = val;
6389 if (code == AND)
6390 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6391 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6392 code, model, false);
6393 if (integer_onep (flag))
6395 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6396 NULL_RTX, true, OPTAB_DIRECT);
6397 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6398 true, OPTAB_DIRECT);
6400 else
6401 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6402 OPTAB_DIRECT);
6403 if (result != target)
6404 emit_move_insn (target, result);
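/* Illustrative source pattern (an assumption about the gimple-level
   matching, not taken from this file): a combination such as

     mask = 1u << bit;
     was_set = __atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST) & mask;

   can be funneled into IFN_ATOMIC_BIT_TEST_AND_SET and expanded here
   without materializing the full fetched value.  */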
6407 /* Expand an atomic clear operation.
6408 void _atomic_clear (BOOL *obj, enum memmodel)
6409 EXP is the call expression. */
6411 static rtx
6412 expand_builtin_atomic_clear (tree exp)
6414 machine_mode mode;
6415 rtx mem, ret;
6416 enum memmodel model;
6418 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6419 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6420 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6422 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6424 source_location loc
6425 = expansion_point_location_if_in_system_header (input_location);
6426 warning_at (loc, OPT_Winvalid_memory_model,
6427 "invalid memory model for %<__atomic_store%>");
6428 model = MEMMODEL_SEQ_CST;
6431 /* Try issuing an __atomic_store, allowing a fallback to __sync_lock_release;
6432 failing both, issue a plain store below. The only way this can
6433 fail is if the bool type is larger than a word size. Unlikely, but
6434 handle it anyway for completeness. Assume a single-threaded model since
6435 there is no atomic support in this case, and no barriers are required. */
6436 ret = expand_atomic_store (mem, const0_rtx, model, true);
6437 if (!ret)
6438 emit_move_insn (mem, const0_rtx);
6439 return const0_rtx;
6442 /* Expand an atomic test_and_set operation.
6443 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6444 EXP is the call expression. */
6446 static rtx
6447 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6449 rtx mem;
6450 enum memmodel model;
6451 machine_mode mode;
6453 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6454 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6457 return expand_atomic_test_and_set (target, mem, model);
6461 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6462 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6464 static tree
6465 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6467 int size;
6468 machine_mode mode;
6469 unsigned int mode_align, type_align;
6471 if (TREE_CODE (arg0) != INTEGER_CST)
6472 return NULL_TREE;
6474 /* We need a corresponding integer mode for the access to be lock-free. */
6475 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6476 if (!int_mode_for_size (size, 0).exists (&mode))
6477 return boolean_false_node;
6479 mode_align = GET_MODE_ALIGNMENT (mode);
6481 if (TREE_CODE (arg1) == INTEGER_CST)
6483 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6485 /* Either this argument is null, or it's a fake pointer encoding
6486 the alignment of the object. */
6487 val = least_bit_hwi (val);
6488 val *= BITS_PER_UNIT;
6490 if (val == 0 || mode_align < val)
6491 type_align = mode_align;
6492 else
6493 type_align = val;
6495 else
6497 tree ttype = TREE_TYPE (arg1);
6499 /* This function is usually invoked and folded immediately by the front
6500 end before anything else has a chance to look at it. The pointer
6501 parameter at this point is usually cast to a void *, so check for that
6502 and look past the cast. */
6503 if (CONVERT_EXPR_P (arg1)
6504 && POINTER_TYPE_P (ttype)
6505 && VOID_TYPE_P (TREE_TYPE (ttype))
6506 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6507 arg1 = TREE_OPERAND (arg1, 0);
6509 ttype = TREE_TYPE (arg1);
6510 gcc_assert (POINTER_TYPE_P (ttype));
6512 /* Get the underlying type of the object. */
6513 ttype = TREE_TYPE (ttype);
6514 type_align = TYPE_ALIGN (ttype);
6517 /* If the object has smaller alignment, the lock free routines cannot
6518 be used. */
6519 if (type_align < mode_align)
6520 return boolean_false_node;
6522 /* Check if a compare_and_swap pattern exists for the mode which represents
6523 the required size. The pattern is not allowed to fail, so the existence
6524 of the pattern indicates support is present. Also require that an
6525 atomic load exists for the required size. */
6526 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6527 return boolean_true_node;
6528 else
6529 return boolean_false_node;
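/* A minimal usage sketch (illustrative only): since the folding above
   yields a compile-time constant, a typical use is

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int is expected to be lock-free");

   with the null second argument meaning "assume typical alignment for
   the size".  */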
6532 /* Return true if the parameters to call EXP represent an object which will
6533 always generate lock free instructions. The first argument represents the
6534 size of the object, and the second parameter is a pointer to the object
6535 itself. If NULL is passed for the object, then the result is based on
6536 typical alignment for an object of the specified size. Otherwise return
6537 false. */
6539 static rtx
6540 expand_builtin_atomic_always_lock_free (tree exp)
6542 tree size;
6543 tree arg0 = CALL_EXPR_ARG (exp, 0);
6544 tree arg1 = CALL_EXPR_ARG (exp, 1);
6546 if (TREE_CODE (arg0) != INTEGER_CST)
6548 error ("non-constant argument 1 to __atomic_always_lock_free");
6549 return const0_rtx;
6552 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6553 if (size == boolean_true_node)
6554 return const1_rtx;
6555 return const0_rtx;
6558 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6559 is lock-free on this architecture. */
6561 static tree
6562 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6564 if (!flag_inline_atomics)
6565 return NULL_TREE;
6567 /* If it isn't always lock free, don't generate a result. */
6568 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6569 return boolean_true_node;
6571 return NULL_TREE;
6574 /* Return true if the parameters to call EXP represent an object which will
6575 always generate lock free instructions. The first argument represents the
6576 size of the object, and the second parameter is a pointer to the object
6577 itself. If NULL is passed for the object, then the result is based on
6578 typical alignment for an object of the specified size. Otherwise return
6579 NULL. */
6581 static rtx
6582 expand_builtin_atomic_is_lock_free (tree exp)
6584 tree size;
6585 tree arg0 = CALL_EXPR_ARG (exp, 0);
6586 tree arg1 = CALL_EXPR_ARG (exp, 1);
6588 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6590 error ("non-integer argument 1 to __atomic_is_lock_free");
6591 return NULL_RTX;
6594 if (!flag_inline_atomics)
6595 return NULL_RTX;
6597 /* If the value is known at compile time, return the RTX for it. */
6598 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6599 if (size == boolean_true_node)
6600 return const1_rtx;
6602 return NULL_RTX;
6605 /* Expand the __atomic_thread_fence intrinsic:
6606 void __atomic_thread_fence (enum memmodel)
6607 EXP is the CALL_EXPR. */
6609 static void
6610 expand_builtin_atomic_thread_fence (tree exp)
6612 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6613 expand_mem_thread_fence (model);
6616 /* Expand the __atomic_signal_fence intrinsic:
6617 void __atomic_signal_fence (enum memmodel)
6618 EXP is the CALL_EXPR. */
6620 static void
6621 expand_builtin_atomic_signal_fence (tree exp)
6623 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6624 expand_mem_signal_fence (model);
6627 /* Expand the __sync_synchronize intrinsic. */
6629 static void
6630 expand_builtin_sync_synchronize (void)
6632 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6635 static rtx
6636 expand_builtin_thread_pointer (tree exp, rtx target)
6638 enum insn_code icode;
6639 if (!validate_arglist (exp, VOID_TYPE))
6640 return const0_rtx;
6641 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6642 if (icode != CODE_FOR_nothing)
6644 struct expand_operand op;
6645 /* If the target is not suitable then create a new target. */
6646 if (target == NULL_RTX
6647 || !REG_P (target)
6648 || GET_MODE (target) != Pmode)
6649 target = gen_reg_rtx (Pmode);
6650 create_output_operand (&op, target, Pmode);
6651 expand_insn (icode, 1, &op);
6652 return target;
6654 error ("__builtin_thread_pointer is not supported on this target");
6655 return const0_rtx;
6658 static void
6659 expand_builtin_set_thread_pointer (tree exp)
6661 enum insn_code icode;
6662 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6663 return;
6664 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6665 if (icode != CODE_FOR_nothing)
6667 struct expand_operand op;
6668 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6669 Pmode, EXPAND_NORMAL);
6670 create_input_operand (&op, val, Pmode);
6671 expand_insn (icode, 1, &op);
6672 return;
6674 error ("__builtin_set_thread_pointer is not supported on this target");
6678 /* Emit code to restore the current value of the stack. */
6680 static void
6681 expand_stack_restore (tree var)
6683 rtx_insn *prev;
6684 rtx sa = expand_normal (var);
6686 sa = convert_memory_address (Pmode, sa);
6688 prev = get_last_insn ();
6689 emit_stack_restore (SAVE_BLOCK, sa);
6691 record_new_stack_level ();
6693 fixup_args_size_notes (prev, get_last_insn (), 0);
6696 /* Emit code to save the current value of the stack. */
6698 static rtx
6699 expand_stack_save (void)
6701 rtx ret = NULL_RTX;
6703 emit_stack_save (SAVE_BLOCK, &ret);
6704 return ret;
6707 /* Emit code to get the OpenACC gang, worker or vector id or size. */
6709 static rtx
6710 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6712 const char *name;
6713 rtx fallback_retval;
6714 rtx_insn *(*gen_fn) (rtx, rtx);
6715 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6717 case BUILT_IN_GOACC_PARLEVEL_ID:
6718 name = "__builtin_goacc_parlevel_id";
6719 fallback_retval = const0_rtx;
6720 gen_fn = targetm.gen_oacc_dim_pos;
6721 break;
6722 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6723 name = "__builtin_goacc_parlevel_size";
6724 fallback_retval = const1_rtx;
6725 gen_fn = targetm.gen_oacc_dim_size;
6726 break;
6727 default:
6728 gcc_unreachable ();
6731 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6733 error ("%qs only supported in OpenACC code", name);
6734 return const0_rtx;
6737 tree arg = CALL_EXPR_ARG (exp, 0);
6738 if (TREE_CODE (arg) != INTEGER_CST)
6740 error ("non-constant argument 0 to %qs", name);
6741 return const0_rtx;
6744 int dim = TREE_INT_CST_LOW (arg);
6745 switch (dim)
6747 case GOMP_DIM_GANG:
6748 case GOMP_DIM_WORKER:
6749 case GOMP_DIM_VECTOR:
6750 break;
6751 default:
6752 error ("illegal argument 0 to %qs", name);
6753 return const0_rtx;
6756 if (ignore)
6757 return target;
6759 if (target == NULL_RTX)
6760 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6762 if (!targetm.have_oacc_dim_size ())
6764 emit_move_insn (target, fallback_retval);
6765 return target;
6768 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6769 emit_insn (gen_fn (reg, GEN_INT (dim)));
6770 if (reg != target)
6771 emit_move_insn (target, reg);
6773 return target;
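/* A minimal usage sketch (illustrative only; GOMP_DIM_GANG comes from
   gomp-constants.h):

     int gang = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);

   Outside of OpenACC code the call is diagnosed with the error
   above.  */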
6776 /* Expand a string compare operation using a sequence of char comparisons
6777 to get rid of the calling overhead, with result going to TARGET if
6778 that's convenient.
6780 VAR_STR is the variable string source;
6781 CONST_STR is the constant string source;
6782 LENGTH is the number of chars to compare;
6783 CONST_STR_N indicates which source string is the constant string;
6784 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6786 The generated sequence is (assuming const_str_n is 2, i.e., arg2 is a constant string):
6788 target = (int) (unsigned char) var_str[0]
6789 - (int) (unsigned char) const_str[0];
6790 if (target != 0)
6791 goto ne_label;
6793 target = (int) (unsigned char) var_str[length - 2]
6794 - (int) (unsigned char) const_str[length - 2];
6795 if (target != 0)
6796 goto ne_label;
6797 target = (int) (unsigned char) var_str[length - 1]
6798 - (int) (unsigned char) const_str[length - 1];
6799 ne_label:
6802 static rtx
6803 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6804 unsigned HOST_WIDE_INT length,
6805 int const_str_n, machine_mode mode)
6807 HOST_WIDE_INT offset = 0;
6808 rtx var_rtx_array
6809 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6810 rtx var_rtx = NULL_RTX;
6811 rtx const_rtx = NULL_RTX;
6812 rtx result = target ? target : gen_reg_rtx (mode);
6813 rtx_code_label *ne_label = gen_label_rtx ();
6814 tree unit_type_node = unsigned_char_type_node;
6815 scalar_int_mode unit_mode
6816 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6818 start_sequence ();
6820 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6822 var_rtx
6823 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6824 const_rtx = c_readstr (const_str + offset, unit_mode);
6825 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6826 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6828 op0 = convert_modes (mode, unit_mode, op0, 1);
6829 op1 = convert_modes (mode, unit_mode, op1, 1);
6830 result = expand_simple_binop (mode, MINUS, op0, op1,
6831 result, 1, OPTAB_WIDEN);
6832 if (i < length - 1)
6833 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6834 mode, true, ne_label);
6835 offset += GET_MODE_SIZE (unit_mode);
6838 emit_label (ne_label);
6839 rtx_insn *insns = get_insns ();
6840 end_sequence ();
6841 emit_insn (insns);
6843 return result;
6846 /* Inline expansion of a call to str(n)cmp, with the result going to
6847 TARGET if that's convenient.
6848 If the call is not inlined, return NULL_RTX. */
6849 static rtx
6850 inline_expand_builtin_string_cmp (tree exp, rtx target)
6852 tree fndecl = get_callee_fndecl (exp);
6853 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6854 unsigned HOST_WIDE_INT length = 0;
6855 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6857 /* Do NOT apply this inlining expansion when optimizing for size or
6858 optimization level below 2. */
6859 if (optimize < 2 || optimize_insn_for_size_p ())
6860 return NULL_RTX;
6862 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6863 || fcode == BUILT_IN_STRNCMP
6864 || fcode == BUILT_IN_MEMCMP);
6866 /* On a target where the type of the call (int) has the same or narrower
6867 precision than unsigned char, give up the inlining expansion. */
6868 if (TYPE_PRECISION (unsigned_char_type_node)
6869 >= TYPE_PRECISION (TREE_TYPE (exp)))
6870 return NULL_RTX;
6872 tree arg1 = CALL_EXPR_ARG (exp, 0);
6873 tree arg2 = CALL_EXPR_ARG (exp, 1);
6874 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6876 unsigned HOST_WIDE_INT len1 = 0;
6877 unsigned HOST_WIDE_INT len2 = 0;
6878 unsigned HOST_WIDE_INT len3 = 0;
6880 const char *src_str1 = c_getstr (arg1, &len1);
6881 const char *src_str2 = c_getstr (arg2, &len2);
6883 /* If neither string is a constant string, the call does not qualify. */
6884 if (!src_str1 && !src_str2)
6885 return NULL_RTX;
6887 /* For strncmp, if the length is not a constant, the call does not qualify. */
6888 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6889 return NULL_RTX;
6891 int const_str_n = 0;
6892 if (!len1)
6893 const_str_n = 2;
6894 else if (!len2)
6895 const_str_n = 1;
6896 else if (len2 > len1)
6897 const_str_n = 1;
6898 else
6899 const_str_n = 2;
6901 gcc_checking_assert (const_str_n > 0);
6902 length = (const_str_n == 1) ? len1 : len2;
6904 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6905 length = len3;
6907 /* If the length of the comparison is larger than the threshold,
6908 do nothing. */
6909 if (length > (unsigned HOST_WIDE_INT)
6910 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6911 return NULL_RTX;
6913 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6915 /* Now expand the call inline. */
6916 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6917 (const_str_n == 1) ? src_str1 : src_str2, length,
6918 const_str_n, mode);
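/* Illustrative outcome (assuming the threshold --param
   builtin-string-cmp-inline-length is not exceeded): a call such as

     int r = strcmp (s, "hi");

   is expanded into a short sequence of unsigned-char differences with
   an early exit after each nonzero result, following the template in
   the comment before inline_string_cmp.  */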
6921 /* Expand an expression EXP that calls a built-in function,
6922 with result going to TARGET if that's convenient
6923 (and in mode MODE if that's convenient).
6924 SUBTARGET may be used as the target for computing one of EXP's operands.
6925 IGNORE is nonzero if the value is to be ignored. */
6927 rtx
6928 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6929 int ignore)
6931 tree fndecl = get_callee_fndecl (exp);
6932 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6933 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6934 int flags;
6936 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6937 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6939 /* When ASan is enabled, we don't want to expand some memory/string
6940 builtins and instead rely on libsanitizer's hooks. This allows us to avoid
6941 redundant checks and to be sure that a possible overflow will be detected
6942 by ASan. */
6944 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6945 return expand_call (exp, target, ignore);
6947 /* When not optimizing, generate calls to library functions for a certain
6948 set of builtins. */
6949 if (!optimize
6950 && !called_as_built_in (fndecl)
6951 && fcode != BUILT_IN_FORK
6952 && fcode != BUILT_IN_EXECL
6953 && fcode != BUILT_IN_EXECV
6954 && fcode != BUILT_IN_EXECLP
6955 && fcode != BUILT_IN_EXECLE
6956 && fcode != BUILT_IN_EXECVP
6957 && fcode != BUILT_IN_EXECVE
6958 && !ALLOCA_FUNCTION_CODE_P (fcode)
6959 && fcode != BUILT_IN_FREE)
6960 return expand_call (exp, target, ignore);
6962 /* The built-in function expanders test for target == const0_rtx
6963 to determine whether the function's result will be ignored. */
6964 if (ignore)
6965 target = const0_rtx;
6967 /* If the result of a pure or const built-in function is ignored, and
6968 none of its arguments are volatile, we can avoid expanding the
6969 built-in call and just evaluate the arguments for side-effects. */
6970 if (target == const0_rtx
6971 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6972 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6974 bool volatilep = false;
6975 tree arg;
6976 call_expr_arg_iterator iter;
6978 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6979 if (TREE_THIS_VOLATILE (arg))
6981 volatilep = true;
6982 break;
6985 if (! volatilep)
6987 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6988 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6989 return const0_rtx;
6993 switch (fcode)
6995 CASE_FLT_FN (BUILT_IN_FABS):
6996 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6997 case BUILT_IN_FABSD32:
6998 case BUILT_IN_FABSD64:
6999 case BUILT_IN_FABSD128:
7000 target = expand_builtin_fabs (exp, target, subtarget);
7001 if (target)
7002 return target;
7003 break;
7005 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7006 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7007 target = expand_builtin_copysign (exp, target, subtarget);
7008 if (target)
7009 return target;
7010 break;
7012 /* Just do a normal library call if we were unable to fold
7013 the values. */
7014 CASE_FLT_FN (BUILT_IN_CABS):
7015 break;
7017 CASE_FLT_FN (BUILT_IN_FMA):
7018 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7019 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7020 if (target)
7021 return target;
7022 break;
7024 CASE_FLT_FN (BUILT_IN_ILOGB):
7025 if (! flag_unsafe_math_optimizations)
7026 break;
7027 gcc_fallthrough ();
7028 CASE_FLT_FN (BUILT_IN_ISINF):
7029 CASE_FLT_FN (BUILT_IN_FINITE):
7030 case BUILT_IN_ISFINITE:
7031 case BUILT_IN_ISNORMAL:
7032 target = expand_builtin_interclass_mathfn (exp, target);
7033 if (target)
7034 return target;
7035 break;
7037 CASE_FLT_FN (BUILT_IN_ICEIL):
7038 CASE_FLT_FN (BUILT_IN_LCEIL):
7039 CASE_FLT_FN (BUILT_IN_LLCEIL):
7040 CASE_FLT_FN (BUILT_IN_LFLOOR):
7041 CASE_FLT_FN (BUILT_IN_IFLOOR):
7042 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7043 target = expand_builtin_int_roundingfn (exp, target);
7044 if (target)
7045 return target;
7046 break;
7048 CASE_FLT_FN (BUILT_IN_IRINT):
7049 CASE_FLT_FN (BUILT_IN_LRINT):
7050 CASE_FLT_FN (BUILT_IN_LLRINT):
7051 CASE_FLT_FN (BUILT_IN_IROUND):
7052 CASE_FLT_FN (BUILT_IN_LROUND):
7053 CASE_FLT_FN (BUILT_IN_LLROUND):
7054 target = expand_builtin_int_roundingfn_2 (exp, target);
7055 if (target)
7056 return target;
7057 break;
7059 CASE_FLT_FN (BUILT_IN_POWI):
7060 target = expand_builtin_powi (exp, target);
7061 if (target)
7062 return target;
7063 break;
7065 CASE_FLT_FN (BUILT_IN_CEXPI):
7066 target = expand_builtin_cexpi (exp, target);
7067 gcc_assert (target);
7068 return target;
7070 CASE_FLT_FN (BUILT_IN_SIN):
7071 CASE_FLT_FN (BUILT_IN_COS):
7072 if (! flag_unsafe_math_optimizations)
7073 break;
7074 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7075 if (target)
7076 return target;
7077 break;
7079 CASE_FLT_FN (BUILT_IN_SINCOS):
7080 if (! flag_unsafe_math_optimizations)
7081 break;
7082 target = expand_builtin_sincos (exp);
7083 if (target)
7084 return target;
7085 break;
7087 case BUILT_IN_APPLY_ARGS:
7088 return expand_builtin_apply_args ();
7090 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7091 FUNCTION with a copy of the parameters described by
7092 ARGUMENTS, and ARGSIZE. It returns a block of memory
7093 allocated on the stack into which is stored all the registers
7094 that might possibly be used for returning the result of a
7095 function. ARGUMENTS is the value returned by
7096 __builtin_apply_args. ARGSIZE is the number of bytes of
7097 arguments that must be copied. ??? How should this value be
7098 computed? We'll also need a safe worst case value for varargs
7099 functions. */
7100 case BUILT_IN_APPLY:
7101 if (!validate_arglist (exp, POINTER_TYPE,
7102 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7103 && !validate_arglist (exp, REFERENCE_TYPE,
7104 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7105 return const0_rtx;
7106 else
7108 rtx ops[3];
7110 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7111 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7112 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7114 return expand_builtin_apply (ops[0], ops[1], ops[2]);
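/* Illustration (editorial sketch, not part of GCC): the classic use of
   these builtins is forwarding every incoming argument to another
   function with the same signature:

     extern void real_handler (int, int);

     void wrapper (int a, int b)
     {
       __builtin_apply ((void (*) ()) real_handler,
                        __builtin_apply_args (), 16);
     }

   The 16-byte ARGSIZE is a guess made for this sketch; as the comment
   above notes, computing a safe value in general is an open question.  */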
7117 /* __builtin_return (RESULT) causes the function to return the
7118 value described by RESULT. RESULT is address of the block of
7119 memory returned by __builtin_apply. */
7120 case BUILT_IN_RETURN:
7121 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7122 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7123 return const0_rtx;
7125 case BUILT_IN_SAVEREGS:
7126 return expand_builtin_saveregs ();
7128 case BUILT_IN_VA_ARG_PACK:
7129 /* All valid uses of __builtin_va_arg_pack () are removed during
7130 inlining. */
7131 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7132 return const0_rtx;
7134 case BUILT_IN_VA_ARG_PACK_LEN:
7135 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7136 inlining. */
7137 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7138 return const0_rtx;
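/* Illustration (editorial sketch, not part of GCC): the only valid home
   for these builtins is an always_inline variadic wrapper, where the
   inliner substitutes the caller's trailing arguments:

     extern int my_printf (const char *fmt, ...);

     extern __inline __attribute__ ((always_inline, gnu_inline)) int
     my_printf_chk (const char *fmt, ...)
     {
       return my_printf (fmt, __builtin_va_arg_pack ());
     }

   my_printf and my_printf_chk are made-up names; any use that survives
   to expansion time, as here, is diagnosed as an error.  */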
7140 /* Return the address of the first anonymous stack arg. */
7141 case BUILT_IN_NEXT_ARG:
7142 if (fold_builtin_next_arg (exp, false))
7143 return const0_rtx;
7144 return expand_builtin_next_arg ();
7146 case BUILT_IN_CLEAR_CACHE:
7147 target = expand_builtin___clear_cache (exp);
7148 if (target)
7149 return target;
7150 break;
7152 case BUILT_IN_CLASSIFY_TYPE:
7153 return expand_builtin_classify_type (exp);
7155 case BUILT_IN_CONSTANT_P:
7156 return const0_rtx;
7158 case BUILT_IN_FRAME_ADDRESS:
7159 case BUILT_IN_RETURN_ADDRESS:
7160 return expand_builtin_frame_address (fndecl, exp);
7162 /* Returns the address of the area where the structure is returned.
7163 0 otherwise. */
7164 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7165 if (call_expr_nargs (exp) != 0
7166 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7167 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7168 return const0_rtx;
7169 else
7170 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7172 CASE_BUILT_IN_ALLOCA:
7173 target = expand_builtin_alloca (exp);
7174 if (target)
7175 return target;
7176 break;
7178 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7179 return expand_asan_emit_allocas_unpoison (exp);
7181 case BUILT_IN_STACK_SAVE:
7182 return expand_stack_save ();
7184 case BUILT_IN_STACK_RESTORE:
7185 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7186 return const0_rtx;
7188 case BUILT_IN_BSWAP16:
7189 case BUILT_IN_BSWAP32:
7190 case BUILT_IN_BSWAP64:
7191 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7192 if (target)
7193 return target;
7194 break;
7196 CASE_INT_FN (BUILT_IN_FFS):
7197 target = expand_builtin_unop (target_mode, exp, target,
7198 subtarget, ffs_optab);
7199 if (target)
7200 return target;
7201 break;
7203 CASE_INT_FN (BUILT_IN_CLZ):
7204 target = expand_builtin_unop (target_mode, exp, target,
7205 subtarget, clz_optab);
7206 if (target)
7207 return target;
7208 break;
7210 CASE_INT_FN (BUILT_IN_CTZ):
7211 target = expand_builtin_unop (target_mode, exp, target,
7212 subtarget, ctz_optab);
7213 if (target)
7214 return target;
7215 break;
7217 CASE_INT_FN (BUILT_IN_CLRSB):
7218 target = expand_builtin_unop (target_mode, exp, target,
7219 subtarget, clrsb_optab);
7220 if (target)
7221 return target;
7222 break;
7224 CASE_INT_FN (BUILT_IN_POPCOUNT):
7225 target = expand_builtin_unop (target_mode, exp, target,
7226 subtarget, popcount_optab);
7227 if (target)
7228 return target;
7229 break;
7231 CASE_INT_FN (BUILT_IN_PARITY):
7232 target = expand_builtin_unop (target_mode, exp, target,
7233 subtarget, parity_optab);
7234 if (target)
7235 return target;
7236 break;
7238 case BUILT_IN_STRLEN:
7239 target = expand_builtin_strlen (exp, target, target_mode);
7240 if (target)
7241 return target;
7242 break;
7244 case BUILT_IN_STRNLEN:
7245 target = expand_builtin_strnlen (exp, target, target_mode);
7246 if (target)
7247 return target;
7248 break;
7250 case BUILT_IN_STRCAT:
7251 target = expand_builtin_strcat (exp, target);
7252 if (target)
7253 return target;
7254 break;
7256 case BUILT_IN_STRCPY:
7257 target = expand_builtin_strcpy (exp, target);
7258 if (target)
7259 return target;
7260 break;
7262 case BUILT_IN_STRNCAT:
7263 target = expand_builtin_strncat (exp, target);
7264 if (target)
7265 return target;
7266 break;
7268 case BUILT_IN_STRNCPY:
7269 target = expand_builtin_strncpy (exp, target);
7270 if (target)
7271 return target;
7272 break;
7274 case BUILT_IN_STPCPY:
7275 target = expand_builtin_stpcpy (exp, target, mode);
7276 if (target)
7277 return target;
7278 break;
7280 case BUILT_IN_STPNCPY:
7281 target = expand_builtin_stpncpy (exp, target);
7282 if (target)
7283 return target;
7284 break;
7286 case BUILT_IN_MEMCHR:
7287 target = expand_builtin_memchr (exp, target);
7288 if (target)
7289 return target;
7290 break;
7292 case BUILT_IN_MEMCPY:
7293 target = expand_builtin_memcpy (exp, target);
7294 if (target)
7295 return target;
7296 break;
7298 case BUILT_IN_MEMMOVE:
7299 target = expand_builtin_memmove (exp, target);
7300 if (target)
7301 return target;
7302 break;
7304 case BUILT_IN_MEMPCPY:
7305 target = expand_builtin_mempcpy (exp, target);
7306 if (target)
7307 return target;
7308 break;
7310 case BUILT_IN_MEMSET:
7311 target = expand_builtin_memset (exp, target, mode);
7312 if (target)
7313 return target;
7314 break;
7316 case BUILT_IN_BZERO:
7317 target = expand_builtin_bzero (exp);
7318 if (target)
7319 return target;
7320 break;
7322 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7323 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7324 when changing it to a strcmp call. */
7325 case BUILT_IN_STRCMP_EQ:
7326 target = expand_builtin_memcmp (exp, target, true);
7327 if (target)
7328 return target;
7330 /* Change this call back to a BUILT_IN_STRCMP. */
7331 TREE_OPERAND (exp, 1)
7332 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7334 /* Delete the last parameter. */
7335 unsigned int i;
7336 vec<tree, va_gc> *arg_vec;
7337 vec_alloc (arg_vec, 2);
7338 for (i = 0; i < 2; i++)
7339 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7340 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7341 /* FALLTHROUGH */
7343 case BUILT_IN_STRCMP:
7344 target = expand_builtin_strcmp (exp, target);
7345 if (target)
7346 return target;
7347 break;
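/* Illustration (editorial sketch, not part of GCC): BUILT_IN_STRCMP_EQ
   is created earlier for strcmp calls whose result is only tested for
   equality, e.g.

     if (strcmp (s, "abc") == 0)
       ...

   Such a call can be expanded as a bounded memcmp; the extra length
   argument recorded for that purpose is what gets deleted above when
   falling back to a plain strcmp library call.  */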
7349 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7350 back to a BUILT_IN_STRNCMP. */
7351 case BUILT_IN_STRNCMP_EQ:
7352 target = expand_builtin_memcmp (exp, target, true);
7353 if (target)
7354 return target;
7356 /* Change it back to a BUILT_IN_STRNCMP. */
7357 TREE_OPERAND (exp, 1)
7358 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7359 /* FALLTHROUGH */
7361 case BUILT_IN_STRNCMP:
7362 target = expand_builtin_strncmp (exp, target, mode);
7363 if (target)
7364 return target;
7365 break;
7367 case BUILT_IN_BCMP:
7368 case BUILT_IN_MEMCMP:
7369 case BUILT_IN_MEMCMP_EQ:
7370 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7371 if (target)
7372 return target;
7373 if (fcode == BUILT_IN_MEMCMP_EQ)
7375 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7376 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7378 break;
7380 case BUILT_IN_SETJMP:
7381 /* This should have been lowered to the builtins below. */
7382 gcc_unreachable ();
7384 case BUILT_IN_SETJMP_SETUP:
7385 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7386 and the receiver label. */
7387 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7389 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7390 VOIDmode, EXPAND_NORMAL);
7391 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7392 rtx_insn *label_r = label_rtx (label);
7394 /* This is copied from the handling of non-local gotos. */
7395 expand_builtin_setjmp_setup (buf_addr, label_r);
7396 nonlocal_goto_handler_labels
7397 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7398 nonlocal_goto_handler_labels);
7399 /* ??? Do not let expand_label treat us as such since we would
7400 not want to be both on the list of non-local labels and on
7401 the list of forced labels. */
7402 FORCED_LABEL (label) = 0;
7403 return const0_rtx;
7405 break;
7407 case BUILT_IN_SETJMP_RECEIVER:
7408 /* __builtin_setjmp_receiver is passed the receiver label. */
7409 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7411 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7412 rtx_insn *label_r = label_rtx (label);
7414 expand_builtin_setjmp_receiver (label_r);
7415 return const0_rtx;
7417 break;
7419 /* __builtin_longjmp is passed a pointer to an array of five words.
7420 It's similar to the C library longjmp function but works with
7421 __builtin_setjmp above. */
7422 case BUILT_IN_LONGJMP:
7423 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7425 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7426 VOIDmode, EXPAND_NORMAL);
7427 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7429 if (value != const1_rtx)
7431 error ("%<__builtin_longjmp%> second argument must be 1");
7432 return const0_rtx;
7435 expand_builtin_longjmp (buf_addr, value);
7436 return const0_rtx;
7438 break;
7440 case BUILT_IN_NONLOCAL_GOTO:
7441 target = expand_builtin_nonlocal_goto (exp);
7442 if (target)
7443 return target;
7444 break;
7446 /* This updates the setjmp buffer that is its argument with the value
7447 of the current stack pointer. */
7448 case BUILT_IN_UPDATE_SETJMP_BUF:
7449 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7451 rtx buf_addr
7452 = expand_normal (CALL_EXPR_ARG (exp, 0));
7454 expand_builtin_update_setjmp_buf (buf_addr);
7455 return const0_rtx;
7457 break;
7459 case BUILT_IN_TRAP:
7460 expand_builtin_trap ();
7461 return const0_rtx;
7463 case BUILT_IN_UNREACHABLE:
7464 expand_builtin_unreachable ();
7465 return const0_rtx;
7467 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7468 case BUILT_IN_SIGNBITD32:
7469 case BUILT_IN_SIGNBITD64:
7470 case BUILT_IN_SIGNBITD128:
7471 target = expand_builtin_signbit (exp, target);
7472 if (target)
7473 return target;
7474 break;
7476 /* Various hooks for the DWARF 2 __throw routine. */
7477 case BUILT_IN_UNWIND_INIT:
7478 expand_builtin_unwind_init ();
7479 return const0_rtx;
7480 case BUILT_IN_DWARF_CFA:
7481 return virtual_cfa_rtx;
7482 #ifdef DWARF2_UNWIND_INFO
7483 case BUILT_IN_DWARF_SP_COLUMN:
7484 return expand_builtin_dwarf_sp_column ();
7485 case BUILT_IN_INIT_DWARF_REG_SIZES:
7486 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7487 return const0_rtx;
7488 #endif
7489 case BUILT_IN_FROB_RETURN_ADDR:
7490 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7491 case BUILT_IN_EXTRACT_RETURN_ADDR:
7492 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7493 case BUILT_IN_EH_RETURN:
7494 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7495 CALL_EXPR_ARG (exp, 1));
7496 return const0_rtx;
7497 case BUILT_IN_EH_RETURN_DATA_REGNO:
7498 return expand_builtin_eh_return_data_regno (exp);
7499 case BUILT_IN_EXTEND_POINTER:
7500 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7501 case BUILT_IN_EH_POINTER:
7502 return expand_builtin_eh_pointer (exp);
7503 case BUILT_IN_EH_FILTER:
7504 return expand_builtin_eh_filter (exp);
7505 case BUILT_IN_EH_COPY_VALUES:
7506 return expand_builtin_eh_copy_values (exp);
7508 case BUILT_IN_VA_START:
7509 return expand_builtin_va_start (exp);
7510 case BUILT_IN_VA_END:
7511 return expand_builtin_va_end (exp);
7512 case BUILT_IN_VA_COPY:
7513 return expand_builtin_va_copy (exp);
7514 case BUILT_IN_EXPECT:
7515 return expand_builtin_expect (exp, target);
7516 case BUILT_IN_ASSUME_ALIGNED:
7517 return expand_builtin_assume_aligned (exp, target);
7518 case BUILT_IN_PREFETCH:
7519 expand_builtin_prefetch (exp);
7520 return const0_rtx;
7522 case BUILT_IN_INIT_TRAMPOLINE:
7523 return expand_builtin_init_trampoline (exp, true);
7524 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7525 return expand_builtin_init_trampoline (exp, false);
7526 case BUILT_IN_ADJUST_TRAMPOLINE:
7527 return expand_builtin_adjust_trampoline (exp);
7529 case BUILT_IN_INIT_DESCRIPTOR:
7530 return expand_builtin_init_descriptor (exp);
7531 case BUILT_IN_ADJUST_DESCRIPTOR:
7532 return expand_builtin_adjust_descriptor (exp);
7534 case BUILT_IN_FORK:
7535 case BUILT_IN_EXECL:
7536 case BUILT_IN_EXECV:
7537 case BUILT_IN_EXECLP:
7538 case BUILT_IN_EXECLE:
7539 case BUILT_IN_EXECVP:
7540 case BUILT_IN_EXECVE:
7541 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7542 if (target)
7543 return target;
7544 break;
7546 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7547 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7548 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7549 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7550 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7551 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7552 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7553 if (target)
7554 return target;
7555 break;
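/* Illustration (editorial sketch, not part of GCC): a source-level
   read-modify-write such as

     int old = __sync_fetch_and_add (&counter, 1);

   arrives here, and the _1 .. _16 suffix of the builtin selects the
   QImode .. TImode variant passed to the sync optab.  */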
7557 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7558 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7559 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7560 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7561 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7562 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7563 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7564 if (target)
7565 return target;
7566 break;
7568 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7569 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7570 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7571 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7572 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7573 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7574 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7575 if (target)
7576 return target;
7577 break;
7579 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7580 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7581 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7582 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7583 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7584 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7585 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7586 if (target)
7587 return target;
7588 break;
7590 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7591 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7592 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7593 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7594 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7595 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7596 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7597 if (target)
7598 return target;
7599 break;
7601 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7602 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7603 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7604 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7605 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7606 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7607 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7608 if (target)
7609 return target;
7610 break;
7612 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7613 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7614 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7615 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7616 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7617 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7618 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7619 if (target)
7620 return target;
7621 break;
7623 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7624 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7625 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7626 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7627 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7628 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7629 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7630 if (target)
7631 return target;
7632 break;
7634 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7635 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7636 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7637 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7638 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7639 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7640 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7641 if (target)
7642 return target;
7643 break;
7645 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7646 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7647 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7648 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7649 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7650 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7651 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7652 if (target)
7653 return target;
7654 break;
7656 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7657 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7658 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7659 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7660 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7661 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7662 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7663 if (target)
7664 return target;
7665 break;
7667 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7668 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7669 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7670 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7671 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7672 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7673 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7674 if (target)
7675 return target;
7676 break;
7678 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7679 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7680 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7681 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7682 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7683 if (mode == VOIDmode)
7684 mode = TYPE_MODE (boolean_type_node);
7685 if (!target || !register_operand (target, mode))
7686 target = gen_reg_rtx (mode);
7688 mode = get_builtin_sync_mode
7689 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7690 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7691 if (target)
7692 return target;
7693 break;
7695 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7696 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7697 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7698 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7699 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7700 mode = get_builtin_sync_mode
7701 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7702 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7703 if (target)
7704 return target;
7705 break;
7707 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7708 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7709 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7710 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7711 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7712 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7713 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7714 if (target)
7715 return target;
7716 break;
7718 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7719 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7720 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7721 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7722 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7723 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7724 expand_builtin_sync_lock_release (mode, exp);
7725 return const0_rtx;
7727 case BUILT_IN_SYNC_SYNCHRONIZE:
7728 expand_builtin_sync_synchronize ();
7729 return const0_rtx;
7731 case BUILT_IN_ATOMIC_EXCHANGE_1:
7732 case BUILT_IN_ATOMIC_EXCHANGE_2:
7733 case BUILT_IN_ATOMIC_EXCHANGE_4:
7734 case BUILT_IN_ATOMIC_EXCHANGE_8:
7735 case BUILT_IN_ATOMIC_EXCHANGE_16:
7736 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7737 target = expand_builtin_atomic_exchange (mode, exp, target);
7738 if (target)
7739 return target;
7740 break;
7742 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7743 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7744 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7745 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7746 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7748 unsigned int nargs, z;
7749 vec<tree, va_gc> *vec;
7751 mode =
7752 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7753 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7754 if (target)
7755 return target;
7757 /* If this is turned into an external library call, the weak parameter
7758 must be dropped to match the expected parameter list. */
7759 nargs = call_expr_nargs (exp);
7760 vec_alloc (vec, nargs - 1);
7761 for (z = 0; z < 3; z++)
7762 vec->quick_push (CALL_EXPR_ARG (exp, z));
7763 /* Skip the boolean weak parameter. */
7764 for (z = 4; z < 6; z++)
7765 vec->quick_push (CALL_EXPR_ARG (exp, z));
7766 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7767 break;
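/* Illustration (editorial sketch, not part of GCC): the source form

     __atomic_compare_exchange_n (ptr, &expected, desired,
                                  weak, success_order, failure_order)

   carries a boolean WEAK flag, but the out-of-line libatomic routines
   do not, which is why the call is rebuilt above with argument 3
   removed before falling through to a library call.  */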
7770 case BUILT_IN_ATOMIC_LOAD_1:
7771 case BUILT_IN_ATOMIC_LOAD_2:
7772 case BUILT_IN_ATOMIC_LOAD_4:
7773 case BUILT_IN_ATOMIC_LOAD_8:
7774 case BUILT_IN_ATOMIC_LOAD_16:
7775 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7776 target = expand_builtin_atomic_load (mode, exp, target);
7777 if (target)
7778 return target;
7779 break;
7781 case BUILT_IN_ATOMIC_STORE_1:
7782 case BUILT_IN_ATOMIC_STORE_2:
7783 case BUILT_IN_ATOMIC_STORE_4:
7784 case BUILT_IN_ATOMIC_STORE_8:
7785 case BUILT_IN_ATOMIC_STORE_16:
7786 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7787 target = expand_builtin_atomic_store (mode, exp);
7788 if (target)
7789 return const0_rtx;
7790 break;
7792 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7793 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7794 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7795 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7796 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7798 enum built_in_function lib;
7799 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7800 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7801 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7802 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7803 ignore, lib);
7804 if (target)
7805 return target;
7806 break;
7808 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7809 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7810 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7811 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7812 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7814 enum built_in_function lib;
7815 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7816 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7817 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7818 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7819 ignore, lib);
7820 if (target)
7821 return target;
7822 break;
7824 case BUILT_IN_ATOMIC_AND_FETCH_1:
7825 case BUILT_IN_ATOMIC_AND_FETCH_2:
7826 case BUILT_IN_ATOMIC_AND_FETCH_4:
7827 case BUILT_IN_ATOMIC_AND_FETCH_8:
7828 case BUILT_IN_ATOMIC_AND_FETCH_16:
7830 enum built_in_function lib;
7831 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7832 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7833 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7834 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7835 ignore, lib);
7836 if (target)
7837 return target;
7838 break;
7840 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7841 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7842 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7843 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7844 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7846 enum built_in_function lib;
7847 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7848 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7849 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7850 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7851 ignore, lib);
7852 if (target)
7853 return target;
7854 break;
7856 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7857 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7858 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7859 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7860 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7862 enum built_in_function lib;
7863 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7864 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7865 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7866 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7867 ignore, lib);
7868 if (target)
7869 return target;
7870 break;
7872 case BUILT_IN_ATOMIC_OR_FETCH_1:
7873 case BUILT_IN_ATOMIC_OR_FETCH_2:
7874 case BUILT_IN_ATOMIC_OR_FETCH_4:
7875 case BUILT_IN_ATOMIC_OR_FETCH_8:
7876 case BUILT_IN_ATOMIC_OR_FETCH_16:
7878 enum built_in_function lib;
7879 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7880 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7881 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7882 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7883 ignore, lib);
7884 if (target)
7885 return target;
7886 break;
7888 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7889 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7890 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7891 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7892 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7893 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7894 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7895 ignore, BUILT_IN_NONE);
7896 if (target)
7897 return target;
7898 break;
7900 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7901 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7902 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7903 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7904 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7905 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7906 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7907 ignore, BUILT_IN_NONE);
7908 if (target)
7909 return target;
7910 break;
7912 case BUILT_IN_ATOMIC_FETCH_AND_1:
7913 case BUILT_IN_ATOMIC_FETCH_AND_2:
7914 case BUILT_IN_ATOMIC_FETCH_AND_4:
7915 case BUILT_IN_ATOMIC_FETCH_AND_8:
7916 case BUILT_IN_ATOMIC_FETCH_AND_16:
7917 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7918 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7919 ignore, BUILT_IN_NONE);
7920 if (target)
7921 return target;
7922 break;
7924 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7925 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7926 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7927 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7928 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7929 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7930 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7931 ignore, BUILT_IN_NONE);
7932 if (target)
7933 return target;
7934 break;
7936 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7937 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7938 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7939 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7940 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7941 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7942 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7943 ignore, BUILT_IN_NONE);
7944 if (target)
7945 return target;
7946 break;
7948 case BUILT_IN_ATOMIC_FETCH_OR_1:
7949 case BUILT_IN_ATOMIC_FETCH_OR_2:
7950 case BUILT_IN_ATOMIC_FETCH_OR_4:
7951 case BUILT_IN_ATOMIC_FETCH_OR_8:
7952 case BUILT_IN_ATOMIC_FETCH_OR_16:
7953 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7954 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7955 ignore, BUILT_IN_NONE);
7956 if (target)
7957 return target;
7958 break;
7960 case BUILT_IN_ATOMIC_TEST_AND_SET:
7961 return expand_builtin_atomic_test_and_set (exp, target);
7963 case BUILT_IN_ATOMIC_CLEAR:
7964 return expand_builtin_atomic_clear (exp);
7966 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7967 return expand_builtin_atomic_always_lock_free (exp);
7969 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7970 target = expand_builtin_atomic_is_lock_free (exp);
7971 if (target)
7972 return target;
7973 break;
7975 case BUILT_IN_ATOMIC_THREAD_FENCE:
7976 expand_builtin_atomic_thread_fence (exp);
7977 return const0_rtx;
7979 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7980 expand_builtin_atomic_signal_fence (exp);
7981 return const0_rtx;
7983 case BUILT_IN_OBJECT_SIZE:
7984 return expand_builtin_object_size (exp);
7986 case BUILT_IN_MEMCPY_CHK:
7987 case BUILT_IN_MEMPCPY_CHK:
7988 case BUILT_IN_MEMMOVE_CHK:
7989 case BUILT_IN_MEMSET_CHK:
7990 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7991 if (target)
7992 return target;
7993 break;
7995 case BUILT_IN_STRCPY_CHK:
7996 case BUILT_IN_STPCPY_CHK:
7997 case BUILT_IN_STRNCPY_CHK:
7998 case BUILT_IN_STPNCPY_CHK:
7999 case BUILT_IN_STRCAT_CHK:
8000 case BUILT_IN_STRNCAT_CHK:
8001 case BUILT_IN_SNPRINTF_CHK:
8002 case BUILT_IN_VSNPRINTF_CHK:
8003 maybe_emit_chk_warning (exp, fcode);
8004 break;
8006 case BUILT_IN_SPRINTF_CHK:
8007 case BUILT_IN_VSPRINTF_CHK:
8008 maybe_emit_sprintf_chk_warning (exp, fcode);
8009 break;
8011 case BUILT_IN_FREE:
8012 if (warn_free_nonheap_object)
8013 maybe_emit_free_warning (exp);
8014 break;
8016 case BUILT_IN_THREAD_POINTER:
8017 return expand_builtin_thread_pointer (exp, target);
8019 case BUILT_IN_SET_THREAD_POINTER:
8020 expand_builtin_set_thread_pointer (exp);
8021 return const0_rtx;
8023 case BUILT_IN_ACC_ON_DEVICE:
8024 /* Do a library call if we failed to expand the builtin when
8025 folding. */
8026 break;
8028 case BUILT_IN_GOACC_PARLEVEL_ID:
8029 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8030 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8032 default: /* Just do a library call for an unknown builtin. */
8033 break;
8036 /* The switch statement above can drop through to cause the function
8037 to be called normally. */
8038 return expand_call (exp, target, ignore);
8041 /* Determine whether a tree node represents a call to a built-in
8042 function. If the tree T is a call to a built-in function with
8043 the right number of arguments of the appropriate types, return
8044 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8045 Otherwise the return value is END_BUILTINS. */
8047 enum built_in_function
8048 builtin_mathfn_code (const_tree t)
8050 const_tree fndecl, arg, parmlist;
8051 const_tree argtype, parmtype;
8052 const_call_expr_arg_iterator iter;
8054 if (TREE_CODE (t) != CALL_EXPR)
8055 return END_BUILTINS;
8057 fndecl = get_callee_fndecl (t);
8058 if (fndecl == NULL_TREE
8059 || TREE_CODE (fndecl) != FUNCTION_DECL
8060 || ! DECL_BUILT_IN (fndecl)
8061 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8062 return END_BUILTINS;
8064 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8065 init_const_call_expr_arg_iterator (t, &iter);
8066 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8068 /* If a function doesn't take a variable number of arguments,
8069 the last element in the list will have type `void'. */
8070 parmtype = TREE_VALUE (parmlist);
8071 if (VOID_TYPE_P (parmtype))
8073 if (more_const_call_expr_args_p (&iter))
8074 return END_BUILTINS;
8075 return DECL_FUNCTION_CODE (fndecl);
8078 if (! more_const_call_expr_args_p (&iter))
8079 return END_BUILTINS;
8081 arg = next_const_call_expr_arg (&iter);
8082 argtype = TREE_TYPE (arg);
8084 if (SCALAR_FLOAT_TYPE_P (parmtype))
8086 if (! SCALAR_FLOAT_TYPE_P (argtype))
8087 return END_BUILTINS;
8089 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8091 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8092 return END_BUILTINS;
8094 else if (POINTER_TYPE_P (parmtype))
8096 if (! POINTER_TYPE_P (argtype))
8097 return END_BUILTINS;
8099 else if (INTEGRAL_TYPE_P (parmtype))
8101 if (! INTEGRAL_TYPE_P (argtype))
8102 return END_BUILTINS;
8104 else
8105 return END_BUILTINS;
8108 /* Variable-length argument list. */
8109 return DECL_FUNCTION_CODE (fndecl);
8112 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8113 evaluate to a constant. */
8115 static tree
8116 fold_builtin_constant_p (tree arg)
8118 /* We return 1 for a numeric type that's known to be a constant
8119 value at compile-time or for an aggregate type that's a
8120 literal constant. */
8121 STRIP_NOPS (arg);
8123 /* If we know this is a constant, return the constant one. */
8124 if (CONSTANT_CLASS_P (arg)
8125 || (TREE_CODE (arg) == CONSTRUCTOR
8126 && TREE_CONSTANT (arg)))
8127 return integer_one_node;
8128 if (TREE_CODE (arg) == ADDR_EXPR)
8130 tree op = TREE_OPERAND (arg, 0);
8131 if (TREE_CODE (op) == STRING_CST
8132 || (TREE_CODE (op) == ARRAY_REF
8133 && integer_zerop (TREE_OPERAND (op, 1))
8134 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8135 return integer_one_node;
8138 /* If this expression has side effects, show we don't know it to be a
8139 constant. Likewise if it's a pointer or aggregate type since in
8140 those cases we only want literals, since those are only optimized
8141 when generating RTL, not later.
8142 And finally, if we are compiling an initializer, not code, we
8143 need to return a definite result now; there's not going to be any
8144 more optimization done. */
8145 if (TREE_SIDE_EFFECTS (arg)
8146 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8147 || POINTER_TYPE_P (TREE_TYPE (arg))
8148 || cfun == 0
8149 || folding_initializer
8150 || force_folding_builtin_constant_p)
8151 return integer_zero_node;
8153 return NULL_TREE;
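/* Illustration (editorial sketch, not part of GCC): the usual idiom this
   folding serves is selecting a compile-time path:

     #define ILOG2(n) (__builtin_constant_p (n) \
                       ? ilog2_const (n) : ilog2_runtime (n))

   where ilog2_const and ilog2_runtime are hypothetical helpers; once N
   is known to be constant the conditional collapses entirely.  */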
8156 /* Create builtin_expect with PRED and EXPECTED as its arguments and
8157 return it as a truthvalue. */
8159 static tree
8160 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8161 tree predictor)
8163 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8165 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8166 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8167 ret_type = TREE_TYPE (TREE_TYPE (fn));
8168 pred_type = TREE_VALUE (arg_types);
8169 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8171 pred = fold_convert_loc (loc, pred_type, pred);
8172 expected = fold_convert_loc (loc, expected_type, expected);
8173 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8174 predictor);
8176 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8177 build_int_cst (ret_type, 0));
8180 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
8181 NULL_TREE if no simplification is possible. */
8183 tree
8184 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8186 tree inner, fndecl, inner_arg0;
8187 enum tree_code code;
8189 /* Distribute the expected value over short-circuiting operators.
8190 See through the cast from truthvalue_type_node to long. */
8191 inner_arg0 = arg0;
8192 while (CONVERT_EXPR_P (inner_arg0)
8193 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8194 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8195 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8197 /* If this is a builtin_expect within a builtin_expect keep the
8198 inner one. See through a comparison against a constant. It
8199 might have been added to create a truthvalue.
8200 inner = inner_arg0;
8202 if (COMPARISON_CLASS_P (inner)
8203 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8204 inner = TREE_OPERAND (inner, 0);
8206 if (TREE_CODE (inner) == CALL_EXPR
8207 && (fndecl = get_callee_fndecl (inner))
8208 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8209 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8210 return arg0;
8212 inner = inner_arg0;
8213 code = TREE_CODE (inner);
8214 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8216 tree op0 = TREE_OPERAND (inner, 0);
8217 tree op1 = TREE_OPERAND (inner, 1);
8218 arg1 = save_expr (arg1);
8220 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8221 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8222 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8224 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8227 /* If the argument isn't invariant then there's nothing else we can do. */
8228 if (!TREE_CONSTANT (inner_arg0))
8229 return NULL_TREE;
8231 /* If we expect that a comparison against the argument will fold to
8232 a constant return the constant. In practice, this means a true
8233 constant or the address of a non-weak symbol. */
8234 inner = inner_arg0;
8235 STRIP_NOPS (inner);
8236 if (TREE_CODE (inner) == ADDR_EXPR)
8240 inner = TREE_OPERAND (inner, 0);
8242 while (TREE_CODE (inner) == COMPONENT_REF
8243 || TREE_CODE (inner) == ARRAY_REF);
8244 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8245 return NULL_TREE;
8248 /* Otherwise, ARG0 already has the proper type for the return value. */
8249 return arg0;
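/* Illustration (editorial sketch, not part of GCC): the distribution
   step above rewrites

     __builtin_expect (a && b, 1)

   into the equivalent of

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so the hint reaches both short-circuited tests.  */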
8252 /* Fold a call to __builtin_classify_type with argument ARG. */
8254 static tree
8255 fold_builtin_classify_type (tree arg)
8257 if (arg == 0)
8258 return build_int_cst (integer_type_node, no_type_class);
8260 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8263 /* Fold a call to __builtin_strlen with argument ARG. */
8265 static tree
8266 fold_builtin_strlen (location_t loc, tree type, tree arg)
8268 if (!validate_arg (arg, POINTER_TYPE))
8269 return NULL_TREE;
8270 else
8272 tree len = c_strlen (arg, 0);
8274 if (len)
8275 return fold_convert_loc (loc, type, len);
8277 return NULL_TREE;
8281 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8283 static tree
8284 fold_builtin_inf (location_t loc, tree type, int warn)
8286 REAL_VALUE_TYPE real;
8288 /* __builtin_inff is intended to be usable to define INFINITY on all
8289 targets. If an infinity is not available, INFINITY expands "to a
8290 positive constant of type float that overflows at translation
8291 time", footnote "In this case, using INFINITY will violate the
8292 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8293 Thus we pedwarn to ensure this constraint violation is
8294 diagnosed. */
8295 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8296 pedwarn (loc, 0, "target format does not support infinity");
8298 real_inf (&real);
8299 return build_real (type, real);
8302 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8303 NULL_TREE if no simplification can be made. */
8305 static tree
8306 fold_builtin_sincos (location_t loc,
8307 tree arg0, tree arg1, tree arg2)
8309 tree type;
8310 tree fndecl, call = NULL_TREE;
8312 if (!validate_arg (arg0, REAL_TYPE)
8313 || !validate_arg (arg1, POINTER_TYPE)
8314 || !validate_arg (arg2, POINTER_TYPE))
8315 return NULL_TREE;
8317 type = TREE_TYPE (arg0);
8319 /* Calculate the result when the argument is a constant. */
8320 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8321 if (fn == END_BUILTINS)
8322 return NULL_TREE;
8324 /* Canonicalize sincos to cexpi. */
8325 if (TREE_CODE (arg0) == REAL_CST)
8327 tree complex_type = build_complex_type (type);
8328 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8330 if (!call)
8332 if (!targetm.libc_has_function (function_c99_math_complex)
8333 || !builtin_decl_implicit_p (fn))
8334 return NULL_TREE;
8335 fndecl = builtin_decl_explicit (fn);
8336 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8337 call = builtin_save_expr (call);
8340 tree ptype = build_pointer_type (type);
8341 arg1 = fold_convert (ptype, arg1);
8342 arg2 = fold_convert (ptype, arg2);
8343 return build2 (COMPOUND_EXPR, void_type_node,
8344 build2 (MODIFY_EXPR, void_type_node,
8345 build_fold_indirect_ref_loc (loc, arg1),
8346 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8347 build2 (MODIFY_EXPR, void_type_node,
8348 build_fold_indirect_ref_loc (loc, arg2),
8349 fold_build1_loc (loc, REALPART_EXPR, type, call)));
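/* Illustration (editorial sketch, not part of GCC): the canonicalization
   above turns

     sincos (x, &s, &c);

   into the equivalent of

     _Complex double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   where cexpi is the internal builtin computing cos (x) + i*sin (x).  */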
8352 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8353 Return NULL_TREE if no simplification can be made. */
8355 static tree
8356 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8358 if (!validate_arg (arg1, POINTER_TYPE)
8359 || !validate_arg (arg2, POINTER_TYPE)
8360 || !validate_arg (len, INTEGER_TYPE))
8361 return NULL_TREE;
8363 /* If the LEN parameter is zero, return zero. */
8364 if (integer_zerop (len))
8365 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8366 arg1, arg2);
8368 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8369 if (operand_equal_p (arg1, arg2, 0))
8370 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8372 /* If the LEN parameter is one, return an expression corresponding to
8373 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8374 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8376 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8377 tree cst_uchar_ptr_node
8378 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8380 tree ind1
8381 = fold_convert_loc (loc, integer_type_node,
8382 build1 (INDIRECT_REF, cst_uchar_node,
8383 fold_convert_loc (loc,
8384 cst_uchar_ptr_node,
8385 arg1)));
8386 tree ind2
8387 = fold_convert_loc (loc, integer_type_node,
8388 build1 (INDIRECT_REF, cst_uchar_node,
8389 fold_convert_loc (loc,
8390 cst_uchar_ptr_node,
8391 arg2)));
8392 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8395 return NULL_TREE;
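/* Illustration (editorial sketch, not part of GCC): with LEN equal to
   one the fold above turns

     memcmp (p, q, 1)

   into the branch-free byte difference

     (int) *(const unsigned char *) p - (int) *(const unsigned char *) q

   which has the required sign for a single-byte comparison.  */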
8398 /* Fold a call to builtin isascii with argument ARG. */
8400 static tree
8401 fold_builtin_isascii (location_t loc, tree arg)
8403 if (!validate_arg (arg, INTEGER_TYPE))
8404 return NULL_TREE;
8405 else
8407 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8408 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8409 build_int_cst (integer_type_node,
8410 ~ (unsigned HOST_WIDE_INT) 0x7f));
8411 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8412 arg, integer_zero_node);
8416 /* Fold a call to builtin toascii with argument ARG. */
8418 static tree
8419 fold_builtin_toascii (location_t loc, tree arg)
8421 if (!validate_arg (arg, INTEGER_TYPE))
8422 return NULL_TREE;
8424 /* Transform toascii(c) -> (c & 0x7f). */
8425 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8426 build_int_cst (integer_type_node, 0x7f));
8429 /* Fold a call to builtin isdigit with argument ARG. */
8431 static tree
8432 fold_builtin_isdigit (location_t loc, tree arg)
8434 if (!validate_arg (arg, INTEGER_TYPE))
8435 return NULL_TREE;
8436 else
8438 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8439 /* According to the C standard, isdigit is unaffected by locale.
8440 However, it definitely is affected by the target character set. */
8441 unsigned HOST_WIDE_INT target_digit0
8442 = lang_hooks.to_target_charset ('0');
8444 if (target_digit0 == 0)
8445 return NULL_TREE;
8447 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8448 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8449 build_int_cst (unsigned_type_node, target_digit0));
8450 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8451 build_int_cst (unsigned_type_node, 9));
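/* Illustration (editorial sketch, not part of GCC): the unsigned
   subtraction above replaces the two comparisons of

     c >= '0' && c <= '9'

   with the single test

     (unsigned) c - '0' <= 9

   because values below '0' wrap around to large unsigned numbers.  */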
8455 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8457 static tree
8458 fold_builtin_fabs (location_t loc, tree arg, tree type)
8460 if (!validate_arg (arg, REAL_TYPE))
8461 return NULL_TREE;
8463 arg = fold_convert_loc (loc, type, arg);
8464 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8467 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8469 static tree
8470 fold_builtin_abs (location_t loc, tree arg, tree type)
8472 if (!validate_arg (arg, INTEGER_TYPE))
8473 return NULL_TREE;
8475 arg = fold_convert_loc (loc, type, arg);
8476 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8479 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8481 static tree
8482 fold_builtin_carg (location_t loc, tree arg, tree type)
8484 if (validate_arg (arg, COMPLEX_TYPE)
8485 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8487 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8489 if (atan2_fn)
8491 tree new_arg = builtin_save_expr (arg);
8492 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8493 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8494 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8498 return NULL_TREE;
8501 /* Fold a call to builtin frexp; we can assume the base is 2. */
8503 static tree
8504 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8506 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8507 return NULL_TREE;
8509 STRIP_NOPS (arg0);
8511 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8512 return NULL_TREE;
8514 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8516 /* Proceed if a valid pointer type was passed in. */
8517 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8519 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8520 tree frac, exp;
8522 switch (value->cl)
8524 case rvc_zero:
8525 /* For +-0, return (*exp = 0, +-0). */
8526 exp = integer_zero_node;
8527 frac = arg0;
8528 break;
8529 case rvc_nan:
8530 case rvc_inf:
8531 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8532 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8533 case rvc_normal:
8535 /* Since the frexp function always expects base 2, and in
8536 GCC normalized significands are already in the range
8537 [0.5, 1.0), we have exactly what frexp wants. */
8538 REAL_VALUE_TYPE frac_rvt = *value;
8539 SET_REAL_EXP (&frac_rvt, 0);
8540 frac = build_real (rettype, frac_rvt);
8541 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8543 break;
8544 default:
8545 gcc_unreachable ();
8548 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8549 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8550 TREE_SIDE_EFFECTS (arg1) = 1;
8551 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8554 return NULL_TREE;
8557 /* Fold a call to builtin modf. */
8559 static tree
8560 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8562 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8563 return NULL_TREE;
8565 STRIP_NOPS (arg0);
8567 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8568 return NULL_TREE;
8570 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8572 /* Proceed if a valid pointer type was passed in. */
8573 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8575 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8576 REAL_VALUE_TYPE trunc, frac;
8578 switch (value->cl)
8580 case rvc_nan:
8581 case rvc_zero:
8582 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8583 trunc = frac = *value;
8584 break;
8585 case rvc_inf:
8586 /* For +-Inf, return (*arg1 = arg0, +-0). */
8587 frac = dconst0;
8588 frac.sign = value->sign;
8589 trunc = *value;
8590 break;
8591 case rvc_normal:
8592 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8593 real_trunc (&trunc, VOIDmode, value);
8594 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8595 /* If the original number was negative and already
8596 integral, then the fractional part is -0.0. */
8597 if (value->sign && frac.cl == rvc_zero)
8598 frac.sign = value->sign;
8599 break;
8602 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8603 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8604 build_real (rettype, trunc));
8605 TREE_SIDE_EFFECTS (arg1) = 1;
8606 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8607 build_real (rettype, frac));
8610 return NULL_TREE;
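/* Illustration (editorial sketch, not part of GCC): with constant
   arguments the two folds above complete at compile time, e.g.

     frexp (8.0, &e)   becomes   (*&e = 4, 0.5)
     modf (2.5, &i)    becomes   (*&i = 2.0, 0.5)

   each built as a COMPOUND_EXPR that stores through the pointer before
   yielding the value.  */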
8613 /* Given a location LOC, an interclass builtin function decl FNDECL
8614 and its single argument ARG, return a folded expression computing
8615 the same, or NULL_TREE if we either couldn't or didn't want to fold
8616 (the latter happens if there's an RTL instruction available). */
8618 static tree
8619 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8621 machine_mode mode;
8623 if (!validate_arg (arg, REAL_TYPE))
8624 return NULL_TREE;
8626 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8627 return NULL_TREE;
8629 mode = TYPE_MODE (TREE_TYPE (arg));
8631 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8633 /* If there is no optab, try generic code. */
8634 switch (DECL_FUNCTION_CODE (fndecl))
8636 tree result;
8638 CASE_FLT_FN (BUILT_IN_ISINF):
8640 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8641 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8642 tree type = TREE_TYPE (arg);
8643 REAL_VALUE_TYPE r;
8644 char buf[128];
8646 if (is_ibm_extended)
8648 /* NaN and Inf are encoded in the high-order double value
8649 only. The low-order value is not significant. */
8650 type = double_type_node;
8651 mode = DFmode;
8652 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8654 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8655 real_from_string (&r, buf);
8656 result = build_call_expr (isgr_fn, 2,
8657 fold_build1_loc (loc, ABS_EXPR, type, arg),
8658 build_real (type, r));
8659 return result;
8661 CASE_FLT_FN (BUILT_IN_FINITE):
8662 case BUILT_IN_ISFINITE:
8664 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8665 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8666 tree type = TREE_TYPE (arg);
8667 REAL_VALUE_TYPE r;
8668 char buf[128];
8670 if (is_ibm_extended)
8672 /* NaN and Inf are encoded in the high-order double value
8673 only. The low-order value is not significant. */
8674 type = double_type_node;
8675 mode = DFmode;
8676 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8678 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8679 real_from_string (&r, buf);
8680 result = build_call_expr (isle_fn, 2,
8681 fold_build1_loc (loc, ABS_EXPR, type, arg),
8682 build_real (type, r));
8683 /*result = fold_build2_loc (loc, UNGT_EXPR,
8684 TREE_TYPE (TREE_TYPE (fndecl)),
8685 fold_build1_loc (loc, ABS_EXPR, type, arg),
8686 build_real (type, r));
8687 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8688 TREE_TYPE (TREE_TYPE (fndecl)),
8689 result);*/
8690 return result;
8692 case BUILT_IN_ISNORMAL:
8694 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8695 islessequal(fabs(x),DBL_MAX). */
8696 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8697 tree type = TREE_TYPE (arg);
8698 tree orig_arg, max_exp, min_exp;
8699 machine_mode orig_mode = mode;
8700 REAL_VALUE_TYPE rmax, rmin;
8701 char buf[128];
8703 orig_arg = arg = builtin_save_expr (arg);
8704 if (is_ibm_extended)
8706 /* Use double to test the normal range of IBM extended
8707 precision. Emin for IBM extended precision is
8708 different to emin for IEEE double, being 53 higher
8709 since the low double exponent is at least 53 lower
8710 than the high double exponent. */
8711 type = double_type_node;
8712 mode = DFmode;
8713 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8715 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8717 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8718 real_from_string (&rmax, buf);
8719 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8720 real_from_string (&rmin, buf);
8721 max_exp = build_real (type, rmax);
8722 min_exp = build_real (type, rmin);
8724 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8725 if (is_ibm_extended)
8727 /* Testing the high end of the range is done just using
8728 the high double, using the same test as isfinite().
8729 For the subnormal end of the range we first test the
8730 high double, then if its magnitude is equal to the
8731 limit of 0x1p-969, we test whether the low double is
8732 non-zero and opposite sign to the high double. */
8733 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8734 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8735 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8736 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8737 arg, min_exp);
8738 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8739 complex_double_type_node, orig_arg);
8740 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8741 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8742 tree zero = build_real (type, dconst0);
8743 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8744 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8745 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8746 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8747 fold_build3 (COND_EXPR,
8748 integer_type_node,
8749 hilt, logt, lolt));
8750 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8751 eq_min, ok_lo);
8752 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8753 gt_min, eq_min);
8755 else
8757 tree const isge_fn
8758 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8759 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8761 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8762 max_exp, min_exp);
8763 return result;
8765 default:
8766 break;
8769 return NULL_TREE;
8772 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8773 ARG is the argument for the call. */
8775 static tree
8776 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8778 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8780 if (!validate_arg (arg, REAL_TYPE))
8781 return NULL_TREE;
8783 switch (builtin_index)
8785 case BUILT_IN_ISINF:
8786 if (!HONOR_INFINITIES (arg))
8787 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8789 return NULL_TREE;
8791 case BUILT_IN_ISINF_SIGN:
8793 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8794 /* In a boolean context, GCC will fold the inner COND_EXPR to
8795 1. So e.g. "if (isinf_sign(x))" would be folded to just
8796 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8797 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8798 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8799 tree tmp = NULL_TREE;
8801 arg = builtin_save_expr (arg);
8803 if (signbit_fn && isinf_fn)
8805 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8806 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8808 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8809 signbit_call, integer_zero_node);
8810 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8811 isinf_call, integer_zero_node);
8813 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8814 integer_minus_one_node, integer_one_node);
8815 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8816 isinf_call, tmp,
8817 integer_zero_node);
8820 return tmp;
8823 case BUILT_IN_ISFINITE:
8824 if (!HONOR_NANS (arg)
8825 && !HONOR_INFINITIES (arg))
8826 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8828 return NULL_TREE;
8830 case BUILT_IN_ISNAN:
8831 if (!HONOR_NANS (arg))
8832 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8835 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8836 if (is_ibm_extended)
8838 /* NaN and Inf are encoded in the high-order double value
8839 only. The low-order value is not significant. */
8840 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8843 arg = builtin_save_expr (arg);
8844 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8846 default:
8847 gcc_unreachable ();
8851 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8852 This builtin will generate code to return the appropriate floating
8853 point classification depending on the value of the floating point
8854 number passed in. The possible return values must be supplied as
8855 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8856 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8857 one floating point argument which is "type generic". */
8859 static tree
8860 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8862 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8863 arg, type, res, tmp;
8864 machine_mode mode;
8865 REAL_VALUE_TYPE r;
8866 char buf[128];
8868 /* Verify the required arguments in the original call. */
8869 if (nargs != 6
8870 || !validate_arg (args[0], INTEGER_TYPE)
8871 || !validate_arg (args[1], INTEGER_TYPE)
8872 || !validate_arg (args[2], INTEGER_TYPE)
8873 || !validate_arg (args[3], INTEGER_TYPE)
8874 || !validate_arg (args[4], INTEGER_TYPE)
8875 || !validate_arg (args[5], REAL_TYPE))
8876 return NULL_TREE;
8878 fp_nan = args[0];
8879 fp_infinite = args[1];
8880 fp_normal = args[2];
8881 fp_subnormal = args[3];
8882 fp_zero = args[4];
8883 arg = args[5];
8884 type = TREE_TYPE (arg);
8885 mode = TYPE_MODE (type);
8886 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8888 /* fpclassify(x) ->
8889 isnan(x) ? FP_NAN :
8890 (fabs(x) == Inf ? FP_INFINITE :
8891 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8892 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8894 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8895 build_real (type, dconst0));
8896 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8897 tmp, fp_zero, fp_subnormal);
8899 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8900 real_from_string (&r, buf);
8901 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8902 arg, build_real (type, r));
8903 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8905 if (HONOR_INFINITIES (mode))
8907 real_inf (&r);
8908 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8909 build_real (type, r));
8910 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8911 fp_infinite, res);
8914 if (HONOR_NANS (mode))
8916 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8917 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8920 return res;
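/* Worked example: for IEEE double, emin is -1021, so the threshold
   printed above is 0x1p-1022 (DBL_MIN), and the chain of COND_EXPRs
   tests, from outermost in:

     !ordered (x, x)          -> FP_NAN
     fabs (x) == Inf          -> FP_INFINITE
     fabs (x) >= 0x1p-1022    -> FP_NORMAL
     fabs (x) == 0            -> FP_ZERO
     otherwise                -> FP_SUBNORMAL

   with the NaN and Inf tests omitted when the mode honors neither.  */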
8923 /* Fold a call to an unordered comparison function such as
8924 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8925 being called and ARG0 and ARG1 are the arguments for the call.
8926 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8927 the opposite of the desired result. UNORDERED_CODE is used
8928 for modes that can hold NaNs and ORDERED_CODE is used for
8929 the rest. */
8931 static tree
8932 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8933 enum tree_code unordered_code,
8934 enum tree_code ordered_code)
8936 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8937 enum tree_code code;
8938 tree type0, type1;
8939 enum tree_code code0, code1;
8940 tree cmp_type = NULL_TREE;
8942 type0 = TREE_TYPE (arg0);
8943 type1 = TREE_TYPE (arg1);
8945 code0 = TREE_CODE (type0);
8946 code1 = TREE_CODE (type1);
8948 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8949 /* Choose the wider of two real types. */
8950 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8951 ? type0 : type1;
8952 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8953 cmp_type = type0;
8954 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8955 cmp_type = type1;
8957 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8958 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8960 if (unordered_code == UNORDERED_EXPR)
8962 if (!HONOR_NANS (arg0))
8963 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8964 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8967 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8968 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8969 fold_build2_loc (loc, code, type, arg0, arg1));
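/* E.g. __builtin_isgreater (x, y) arrives here with UNLE_EXPR as the
   unordered code and LE_EXPR as the ordered one, so it folds to
   roughly

     !(x <= y)

   using UNLE_EXPR when the mode honors NaNs; that preserves the
   requirement that the type-generic comparison macros raise no
   FE_INVALID on quiet NaN operands.  */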
8972 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8973 arithmetic if it can never overflow, or into internal functions that
8974 return both the result of the arithmetic and an overflow boolean flag in
8975 a complex integer result, or into some other check for overflow.
8976 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8977 checking part of that. */
8979 static tree
8980 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8981 tree arg0, tree arg1, tree arg2)
8983 enum internal_fn ifn = IFN_LAST;
8984 /* The code of the expression corresponding to the type-generic
8985 built-in, or ERROR_MARK for the type-specific ones. */
8986 enum tree_code opcode = ERROR_MARK;
8987 bool ovf_only = false;
8989 switch (fcode)
8991 case BUILT_IN_ADD_OVERFLOW_P:
8992 ovf_only = true;
8993 /* FALLTHRU */
8994 case BUILT_IN_ADD_OVERFLOW:
8995 opcode = PLUS_EXPR;
8996 /* FALLTHRU */
8997 case BUILT_IN_SADD_OVERFLOW:
8998 case BUILT_IN_SADDL_OVERFLOW:
8999 case BUILT_IN_SADDLL_OVERFLOW:
9000 case BUILT_IN_UADD_OVERFLOW:
9001 case BUILT_IN_UADDL_OVERFLOW:
9002 case BUILT_IN_UADDLL_OVERFLOW:
9003 ifn = IFN_ADD_OVERFLOW;
9004 break;
9005 case BUILT_IN_SUB_OVERFLOW_P:
9006 ovf_only = true;
9007 /* FALLTHRU */
9008 case BUILT_IN_SUB_OVERFLOW:
9009 opcode = MINUS_EXPR;
9010 /* FALLTHRU */
9011 case BUILT_IN_SSUB_OVERFLOW:
9012 case BUILT_IN_SSUBL_OVERFLOW:
9013 case BUILT_IN_SSUBLL_OVERFLOW:
9014 case BUILT_IN_USUB_OVERFLOW:
9015 case BUILT_IN_USUBL_OVERFLOW:
9016 case BUILT_IN_USUBLL_OVERFLOW:
9017 ifn = IFN_SUB_OVERFLOW;
9018 break;
9019 case BUILT_IN_MUL_OVERFLOW_P:
9020 ovf_only = true;
9021 /* FALLTHRU */
9022 case BUILT_IN_MUL_OVERFLOW:
9023 opcode = MULT_EXPR;
9024 /* FALLTHRU */
9025 case BUILT_IN_SMUL_OVERFLOW:
9026 case BUILT_IN_SMULL_OVERFLOW:
9027 case BUILT_IN_SMULLL_OVERFLOW:
9028 case BUILT_IN_UMUL_OVERFLOW:
9029 case BUILT_IN_UMULL_OVERFLOW:
9030 case BUILT_IN_UMULLL_OVERFLOW:
9031 ifn = IFN_MUL_OVERFLOW;
9032 break;
9033 default:
9034 gcc_unreachable ();
9037 /* For the "generic" overloads, the first two arguments can have different
9038 types and the last argument determines the target type to use to check
9039 for overflow. The arguments of the other overloads all have the same
9040 type. */
9041 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9043 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9044 arguments are constant, attempt to fold the built-in call into a constant
9045 expression indicating whether or not it detected an overflow. */
9046 if (ovf_only
9047 && TREE_CODE (arg0) == INTEGER_CST
9048 && TREE_CODE (arg1) == INTEGER_CST)
9049 /* Perform the computation in the target type and check for overflow. */
9050 return omit_one_operand_loc (loc, boolean_type_node,
9051 arith_overflowed_p (opcode, type, arg0, arg1)
9052 ? boolean_true_node : boolean_false_node,
9053 arg2);
9055 tree ctype = build_complex_type (type);
9056 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9057 2, arg0, arg1);
9058 tree tgt = save_expr (call);
9059 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9060 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9061 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9063 if (ovf_only)
9064 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9066 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9067 tree store
9068 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9069 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
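/* Sketch of the result for the type-generic case (variable names
   are purely illustrative):

     int res;
     bool ovf = __builtin_add_overflow (a, b, &res);

   is folded to roughly

     ctmp = .ADD_OVERFLOW (a, b);         <-- complex int result
     *&res = REALPART_EXPR <ctmp>;        <-- arithmetic part
     ovf = (bool) IMAGPART_EXPR <ctmp>;   <-- overflow flag

   while __builtin_add_overflow_p keeps only the flag and merely
   evaluates its third argument.  */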
9072 /* Fold a call to __builtin_FILE to a constant string. */
9074 static inline tree
9075 fold_builtin_FILE (location_t loc)
9077 if (const char *fname = LOCATION_FILE (loc))
9079 /* The documentation says this builtin is equivalent to the preprocessor
9080 __FILE__ macro so it appears appropriate to use the same file prefix
9081 mappings. */
9082 fname = remap_macro_filename (fname);
9083 return build_string_literal (strlen (fname) + 1, fname);
9086 return build_string_literal (1, "");
9089 /* Fold a call to __builtin_FUNCTION to a constant string. */
9091 static inline tree
9092 fold_builtin_FUNCTION ()
9094 const char *name = "";
9096 if (current_function_decl)
9097 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9099 return build_string_literal (strlen (name) + 1, name);
9102 /* Fold a call to __builtin_LINE to an integer constant. */
9104 static inline tree
9105 fold_builtin_LINE (location_t loc, tree type)
9107 return build_int_cst (type, LOCATION_LINE (loc));
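/* Example of the three folders above for a function f defined at
   line 42 of a.c:

     __builtin_FILE ()      => "a.c"  (after file-prefix remapping)
     __builtin_FUNCTION ()  => "f"
     __builtin_LINE ()      => 42

   Used as C++ default arguments these fold at the caller's location,
   which is their typical use.  */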
9110 /* Fold a call to built-in function FNDECL with 0 arguments.
9111 This function returns NULL_TREE if no simplification was possible. */
9113 static tree
9114 fold_builtin_0 (location_t loc, tree fndecl)
9116 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9117 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9118 switch (fcode)
9120 case BUILT_IN_FILE:
9121 return fold_builtin_FILE (loc);
9123 case BUILT_IN_FUNCTION:
9124 return fold_builtin_FUNCTION ();
9126 case BUILT_IN_LINE:
9127 return fold_builtin_LINE (loc, type);
9129 CASE_FLT_FN (BUILT_IN_INF):
9130 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9131 case BUILT_IN_INFD32:
9132 case BUILT_IN_INFD64:
9133 case BUILT_IN_INFD128:
9134 return fold_builtin_inf (loc, type, true);
9136 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9137 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9138 return fold_builtin_inf (loc, type, false);
9140 case BUILT_IN_CLASSIFY_TYPE:
9141 return fold_builtin_classify_type (NULL_TREE);
9143 default:
9144 break;
9146 return NULL_TREE;
9149 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9150 This function returns NULL_TREE if no simplification was possible. */
9152 static tree
9153 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9155 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9156 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9158 if (TREE_CODE (arg0) == ERROR_MARK)
9159 return NULL_TREE;
9161 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9162 return ret;
9164 switch (fcode)
9166 case BUILT_IN_CONSTANT_P:
9168 tree val = fold_builtin_constant_p (arg0);
9170 /* Gimplification will pull the CALL_EXPR for the builtin out of
9171 an if condition. When not optimizing, we'll not CSE it back.
9172 To avoid regressions such as link errors, return false now. */
9173 if (!val && !optimize)
9174 val = integer_zero_node;
9176 return val;
9179 case BUILT_IN_CLASSIFY_TYPE:
9180 return fold_builtin_classify_type (arg0);
9182 case BUILT_IN_STRLEN:
9183 return fold_builtin_strlen (loc, type, arg0);
9185 CASE_FLT_FN (BUILT_IN_FABS):
9186 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9187 case BUILT_IN_FABSD32:
9188 case BUILT_IN_FABSD64:
9189 case BUILT_IN_FABSD128:
9190 return fold_builtin_fabs (loc, arg0, type);
9192 case BUILT_IN_ABS:
9193 case BUILT_IN_LABS:
9194 case BUILT_IN_LLABS:
9195 case BUILT_IN_IMAXABS:
9196 return fold_builtin_abs (loc, arg0, type);
9198 CASE_FLT_FN (BUILT_IN_CONJ):
9199 if (validate_arg (arg0, COMPLEX_TYPE)
9200 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9201 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9202 break;
9204 CASE_FLT_FN (BUILT_IN_CREAL):
9205 if (validate_arg (arg0, COMPLEX_TYPE)
9206 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9207 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9208 break;
9210 CASE_FLT_FN (BUILT_IN_CIMAG):
9211 if (validate_arg (arg0, COMPLEX_TYPE)
9212 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9213 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9214 break;
9216 CASE_FLT_FN (BUILT_IN_CARG):
9217 return fold_builtin_carg (loc, arg0, type);
9219 case BUILT_IN_ISASCII:
9220 return fold_builtin_isascii (loc, arg0);
9222 case BUILT_IN_TOASCII:
9223 return fold_builtin_toascii (loc, arg0);
9225 case BUILT_IN_ISDIGIT:
9226 return fold_builtin_isdigit (loc, arg0);
9228 CASE_FLT_FN (BUILT_IN_FINITE):
9229 case BUILT_IN_FINITED32:
9230 case BUILT_IN_FINITED64:
9231 case BUILT_IN_FINITED128:
9232 case BUILT_IN_ISFINITE:
9234 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9235 if (ret)
9236 return ret;
9237 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9240 CASE_FLT_FN (BUILT_IN_ISINF):
9241 case BUILT_IN_ISINFD32:
9242 case BUILT_IN_ISINFD64:
9243 case BUILT_IN_ISINFD128:
9245 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9246 if (ret)
9247 return ret;
9248 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9251 case BUILT_IN_ISNORMAL:
9252 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9254 case BUILT_IN_ISINF_SIGN:
9255 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9257 CASE_FLT_FN (BUILT_IN_ISNAN):
9258 case BUILT_IN_ISNAND32:
9259 case BUILT_IN_ISNAND64:
9260 case BUILT_IN_ISNAND128:
9261 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9263 case BUILT_IN_FREE:
9264 if (integer_zerop (arg0))
9265 return build_empty_stmt (loc);
9266 break;
9268 default:
9269 break;
9272 return NULL_TREE;
9276 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9277 This function returns NULL_TREE if no simplification was possible. */
9279 static tree
9280 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9282 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9283 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9285 if (TREE_CODE (arg0) == ERROR_MARK
9286 || TREE_CODE (arg1) == ERROR_MARK)
9287 return NULL_TREE;
9289 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9290 return ret;
9292 switch (fcode)
9294 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9295 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9296 if (validate_arg (arg0, REAL_TYPE)
9297 && validate_arg (arg1, POINTER_TYPE))
9298 return do_mpfr_lgamma_r (arg0, arg1, type);
9299 break;
9301 CASE_FLT_FN (BUILT_IN_FREXP):
9302 return fold_builtin_frexp (loc, arg0, arg1, type);
9304 CASE_FLT_FN (BUILT_IN_MODF):
9305 return fold_builtin_modf (loc, arg0, arg1, type);
9307 case BUILT_IN_STRSPN:
9308 return fold_builtin_strspn (loc, arg0, arg1);
9310 case BUILT_IN_STRCSPN:
9311 return fold_builtin_strcspn (loc, arg0, arg1);
9313 case BUILT_IN_STRPBRK:
9314 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9316 case BUILT_IN_EXPECT:
9317 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9319 case BUILT_IN_ISGREATER:
9320 return fold_builtin_unordered_cmp (loc, fndecl,
9321 arg0, arg1, UNLE_EXPR, LE_EXPR);
9322 case BUILT_IN_ISGREATEREQUAL:
9323 return fold_builtin_unordered_cmp (loc, fndecl,
9324 arg0, arg1, UNLT_EXPR, LT_EXPR);
9325 case BUILT_IN_ISLESS:
9326 return fold_builtin_unordered_cmp (loc, fndecl,
9327 arg0, arg1, UNGE_EXPR, GE_EXPR);
9328 case BUILT_IN_ISLESSEQUAL:
9329 return fold_builtin_unordered_cmp (loc, fndecl,
9330 arg0, arg1, UNGT_EXPR, GT_EXPR);
9331 case BUILT_IN_ISLESSGREATER:
9332 return fold_builtin_unordered_cmp (loc, fndecl,
9333 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9334 case BUILT_IN_ISUNORDERED:
9335 return fold_builtin_unordered_cmp (loc, fndecl,
9336 arg0, arg1, UNORDERED_EXPR,
9337 NOP_EXPR);
9339 /* We do the folding for va_start in the expander. */
9340 case BUILT_IN_VA_START:
9341 break;
9343 case BUILT_IN_OBJECT_SIZE:
9344 return fold_builtin_object_size (arg0, arg1);
9346 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9347 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9349 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9350 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9352 default:
9353 break;
9355 return NULL_TREE;
9358 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9359 and ARG2.
9360 This function returns NULL_TREE if no simplification was possible. */
9362 static tree
9363 fold_builtin_3 (location_t loc, tree fndecl,
9364 tree arg0, tree arg1, tree arg2)
9366 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9367 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9369 if (TREE_CODE (arg0) == ERROR_MARK
9370 || TREE_CODE (arg1) == ERROR_MARK
9371 || TREE_CODE (arg2) == ERROR_MARK)
9372 return NULL_TREE;
9374 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9375 arg0, arg1, arg2))
9376 return ret;
9378 switch (fcode)
9381 CASE_FLT_FN (BUILT_IN_SINCOS):
9382 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9384 CASE_FLT_FN (BUILT_IN_REMQUO):
9385 if (validate_arg (arg0, REAL_TYPE)
9386 && validate_arg (arg1, REAL_TYPE)
9387 && validate_arg (arg2, POINTER_TYPE))
9388 return do_mpfr_remquo (arg0, arg1, arg2);
9389 break;
9391 case BUILT_IN_MEMCMP:
9392 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9394 case BUILT_IN_EXPECT:
9395 return fold_builtin_expect (loc, arg0, arg1, arg2);
9397 case BUILT_IN_ADD_OVERFLOW:
9398 case BUILT_IN_SUB_OVERFLOW:
9399 case BUILT_IN_MUL_OVERFLOW:
9400 case BUILT_IN_ADD_OVERFLOW_P:
9401 case BUILT_IN_SUB_OVERFLOW_P:
9402 case BUILT_IN_MUL_OVERFLOW_P:
9403 case BUILT_IN_SADD_OVERFLOW:
9404 case BUILT_IN_SADDL_OVERFLOW:
9405 case BUILT_IN_SADDLL_OVERFLOW:
9406 case BUILT_IN_SSUB_OVERFLOW:
9407 case BUILT_IN_SSUBL_OVERFLOW:
9408 case BUILT_IN_SSUBLL_OVERFLOW:
9409 case BUILT_IN_SMUL_OVERFLOW:
9410 case BUILT_IN_SMULL_OVERFLOW:
9411 case BUILT_IN_SMULLL_OVERFLOW:
9412 case BUILT_IN_UADD_OVERFLOW:
9413 case BUILT_IN_UADDL_OVERFLOW:
9414 case BUILT_IN_UADDLL_OVERFLOW:
9415 case BUILT_IN_USUB_OVERFLOW:
9416 case BUILT_IN_USUBL_OVERFLOW:
9417 case BUILT_IN_USUBLL_OVERFLOW:
9418 case BUILT_IN_UMUL_OVERFLOW:
9419 case BUILT_IN_UMULL_OVERFLOW:
9420 case BUILT_IN_UMULLL_OVERFLOW:
9421 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9423 default:
9424 break;
9426 return NULL_TREE;
9429 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9430 arguments. IGNORE is true if the result of the
9431 function call is ignored. This function returns NULL_TREE if no
9432 simplification was possible. */
9434 tree
9435 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9437 tree ret = NULL_TREE;
9439 switch (nargs)
9441 case 0:
9442 ret = fold_builtin_0 (loc, fndecl);
9443 break;
9444 case 1:
9445 ret = fold_builtin_1 (loc, fndecl, args[0]);
9446 break;
9447 case 2:
9448 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9449 break;
9450 case 3:
9451 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9452 break;
9453 default:
9454 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9455 break;
9457 if (ret)
9459 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9460 SET_EXPR_LOCATION (ret, loc);
9461 return ret;
9463 return NULL_TREE;
9466 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9467 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9468 of arguments in ARGS to be omitted. OLDNARGS is the number of
9469 elements in ARGS. */
9471 static tree
9472 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9473 int skip, tree fndecl, int n, va_list newargs)
9475 int nargs = oldnargs - skip + n;
9476 tree *buffer;
9478 if (n > 0)
9480 int i, j;
9482 buffer = XALLOCAVEC (tree, nargs);
9483 for (i = 0; i < n; i++)
9484 buffer[i] = va_arg (newargs, tree);
9485 for (j = skip; j < oldnargs; j++, i++)
9486 buffer[i] = args[j];
9488 else
9489 buffer = args + skip;
9491 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
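/* A hypothetical use, to make the parameter plumbing concrete:
   rewriting sprintf (dest, "%s", str) as strcpy (dest, str) would
   pass OLDNARGS == 3, SKIP == 2 (dropping dest and the format) and
   N == 1 with dest as the sole new argument, yielding the argument
   vector { dest, str } for the new call.  */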
9494 /* Return true if FNDECL shouldn't be folded right now.
9495 If a built-in function has an always_inline inline wrapper,
9496 defer folding it until after always_inline functions have
9497 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9498 might not be performed. */
9500 bool
9501 avoid_folding_inline_builtin (tree fndecl)
9503 return (DECL_DECLARED_INLINE_P (fndecl)
9504 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9505 && cfun
9506 && !cfun->always_inline_functions_inlined
9507 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
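/* A minimal sketch of what this guards against: fortified headers
   (e.g. glibc with -D_FORTIFY_SOURCE) declare always_inline builtin
   wrappers along the lines of

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Folding a call to strcpy before the wrapper is inlined would
   bypass the object-size check.  */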
9510 /* A wrapper function for builtin folding that prevents warnings for
9511 "statement without effect" and the like, caused by removing the
9512 call node earlier than the warning is generated. */
9514 tree
9515 fold_call_expr (location_t loc, tree exp, bool ignore)
9517 tree ret = NULL_TREE;
9518 tree fndecl = get_callee_fndecl (exp);
9519 if (fndecl
9520 && TREE_CODE (fndecl) == FUNCTION_DECL
9521 && DECL_BUILT_IN (fndecl)
9522 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9523 yet. Defer folding until we see all the arguments
9524 (after inlining). */
9525 && !CALL_EXPR_VA_ARG_PACK (exp))
9527 int nargs = call_expr_nargs (exp);
9529 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9530 instead last argument is __builtin_va_arg_pack (). Defer folding
9531 even in that case, until arguments are finalized. */
9532 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9534 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9535 if (fndecl2
9536 && TREE_CODE (fndecl2) == FUNCTION_DECL
9537 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9538 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9539 return NULL_TREE;
9542 if (avoid_folding_inline_builtin (fndecl))
9543 return NULL_TREE;
9545 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9546 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9547 CALL_EXPR_ARGP (exp), ignore);
9548 else
9550 tree *args = CALL_EXPR_ARGP (exp);
9551 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9552 if (ret)
9553 return ret;
9556 return NULL_TREE;
9559 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9560 N arguments are passed in the array ARGARRAY. Return a folded
9561 expression or NULL_TREE if no simplification was possible. */
9563 tree
9564 fold_builtin_call_array (location_t loc, tree,
9565 tree fn,
9566 int n,
9567 tree *argarray)
9569 if (TREE_CODE (fn) != ADDR_EXPR)
9570 return NULL_TREE;
9572 tree fndecl = TREE_OPERAND (fn, 0);
9573 if (TREE_CODE (fndecl) == FUNCTION_DECL
9574 && DECL_BUILT_IN (fndecl))
9576 /* If last argument is __builtin_va_arg_pack (), arguments to this
9577 function are not finalized yet. Defer folding until they are. */
9578 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9580 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9581 if (fndecl2
9582 && TREE_CODE (fndecl2) == FUNCTION_DECL
9583 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9584 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9585 return NULL_TREE;
9587 if (avoid_folding_inline_builtin (fndecl))
9588 return NULL_TREE;
9589 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9590 return targetm.fold_builtin (fndecl, n, argarray, false);
9591 else
9592 return fold_builtin_n (loc, fndecl, argarray, n, false);
9595 return NULL_TREE;
9598 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9599 along with N new arguments specified as the "..." parameters. SKIP
9600 is the number of arguments in EXP to be omitted. This function is used
9601 to do varargs-to-varargs transformations. */
9603 static tree
9604 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9606 va_list ap;
9607 tree t;
9609 va_start (ap, n);
9610 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9611 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9612 va_end (ap);
9614 return t;
9617 /* Validate a single argument ARG against a tree code CODE representing
9618 a type. Return true when the argument is valid. */
9620 static bool
9621 validate_arg (const_tree arg, enum tree_code code)
9623 if (!arg)
9624 return false;
9625 else if (code == POINTER_TYPE)
9626 return POINTER_TYPE_P (TREE_TYPE (arg));
9627 else if (code == INTEGER_TYPE)
9628 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9629 return code == TREE_CODE (TREE_TYPE (arg));
9632 /* This function validates the types of a function call argument list
9633 against a specified list of tree_codes. If the last specifier is a 0,
9634 that represents an ellipsis, otherwise the last specifier must be a
9635 VOID_TYPE.
9637 This is the GIMPLE version of validate_arglist. Eventually we want to
9638 completely convert builtins.c to work from GIMPLEs and the tree based
9639 validate_arglist will then be removed. */
9641 bool
9642 validate_gimple_arglist (const gcall *call, ...)
9644 enum tree_code code;
9645 bool res = false;
9646 va_list ap;
9647 const_tree arg;
9648 size_t i;
9650 va_start (ap, call);
9651 i = 0;
9655 code = (enum tree_code) va_arg (ap, int);
9656 switch (code)
9658 case 0:
9659 /* This signifies an ellipsis; any further arguments are all ok. */
9660 res = true;
9661 goto end;
9662 case VOID_TYPE:
9663 /* This signifies an endlink, if no arguments remain, return
9664 true, otherwise return false. */
9665 res = (i == gimple_call_num_args (call));
9666 goto end;
9667 default:
9668 /* If no parameters remain or the parameter's code does not
9669 match the specified code, return false. Otherwise continue
9670 checking any remaining arguments. */
9671 arg = gimple_call_arg (call, i++);
9672 if (!validate_arg (arg, code))
9673 goto end;
9674 break;
9677 while (1);
9679 /* We need gotos here since we can only have one VA_CLOSE in a
9680 function. */
9681 end: ;
9682 va_end (ap);
9684 return res;
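/* Typical use, mirroring the tree-level validate_arglist calls in
   this file:

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;

   requires exactly one pointer argument followed by one integral
   argument; ending the list with 0 instead of VOID_TYPE would accept
   further arguments of any type.  */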
9687 /* Default target-specific builtin expander that does nothing. */
9689 rtx
9690 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9691 rtx target ATTRIBUTE_UNUSED,
9692 rtx subtarget ATTRIBUTE_UNUSED,
9693 machine_mode mode ATTRIBUTE_UNUSED,
9694 int ignore ATTRIBUTE_UNUSED)
9696 return NULL_RTX;
9699 /* Returns true if EXP represents data that would potentially reside
9700 in a readonly section. */
9702 bool
9703 readonly_data_expr (tree exp)
9705 STRIP_NOPS (exp);
9707 if (TREE_CODE (exp) != ADDR_EXPR)
9708 return false;
9710 exp = get_base_address (TREE_OPERAND (exp, 0));
9711 if (!exp)
9712 return false;
9714 /* Make sure we call decl_readonly_section only for trees it
9715 can handle (since it returns true for everything it doesn't
9716 understand). */
9717 if (TREE_CODE (exp) == STRING_CST
9718 || TREE_CODE (exp) == CONSTRUCTOR
9719 || (VAR_P (exp) && TREE_STATIC (exp)))
9720 return decl_readonly_section (exp, 0);
9721 else
9722 return false;
9725 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9726 to the call, and TYPE is its return type.
9728 Return NULL_TREE if no simplification was possible, otherwise return the
9729 simplified form of the call as a tree.
9731 The simplified form may be a constant or other expression which
9732 computes the same value, but in a more efficient manner (including
9733 calls to other builtin functions).
9735 The call may contain arguments which need to be evaluated, but
9736 which are not useful to determine the result of the call. In
9737 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9738 COMPOUND_EXPR will be an argument which must be evaluated.
9739 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9740 COMPOUND_EXPR in the chain will contain the tree for the simplified
9741 form of the builtin function call. */
9743 static tree
9744 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9746 if (!validate_arg (s1, POINTER_TYPE)
9747 || !validate_arg (s2, POINTER_TYPE))
9748 return NULL_TREE;
9749 else
9751 tree fn;
9752 const char *p1, *p2;
9754 p2 = c_getstr (s2);
9755 if (p2 == NULL)
9756 return NULL_TREE;
9758 p1 = c_getstr (s1);
9759 if (p1 != NULL)
9761 const char *r = strpbrk (p1, p2);
9762 tree tem;
9764 if (r == NULL)
9765 return build_int_cst (TREE_TYPE (s1), 0);
9767 /* Return an offset into the constant string argument. */
9768 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9769 return fold_convert_loc (loc, type, tem);
9772 if (p2[0] == '\0')
9773 /* strpbrk(x, "") == NULL.
9774 Evaluate and ignore s1 in case it had side-effects. */
9775 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9777 if (p2[1] != '\0')
9778 return NULL_TREE; /* Really call strpbrk. */
9780 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9781 if (!fn)
9782 return NULL_TREE;
9784 /* New argument list transforming strpbrk(s1, s2) to
9785 strchr(s1, s2[0]). */
9786 return build_call_expr_loc (loc, fn, 2, s1,
9787 build_int_cst (integer_type_node, p2[0]));
9791 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9792 to the call.
9794 Return NULL_TREE if no simplification was possible, otherwise return the
9795 simplified form of the call as a tree.
9797 The simplified form may be a constant or other expression which
9798 computes the same value, but in a more efficient manner (including
9799 calls to other builtin functions).
9801 The call may contain arguments which need to be evaluated, but
9802 which are not useful to determine the result of the call. In
9803 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9804 COMPOUND_EXPR will be an argument which must be evaluated.
9805 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9806 COMPOUND_EXPR in the chain will contain the tree for the simplified
9807 form of the builtin function call. */
9809 static tree
9810 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9812 if (!validate_arg (s1, POINTER_TYPE)
9813 || !validate_arg (s2, POINTER_TYPE))
9814 return NULL_TREE;
9815 else
9817 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9819 /* If either argument is "", the result is zero. */
9820 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9821 /* Evaluate and ignore both arguments in case either one has
9822 side-effects. */
9823 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9824 s1, s2);
9825 return NULL_TREE;
9829 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9830 to the call.
9832 Return NULL_TREE if no simplification was possible, otherwise return the
9833 simplified form of the call as a tree.
9835 The simplified form may be a constant or other expression which
9836 computes the same value, but in a more efficient manner (including
9837 calls to other builtin functions).
9839 The call may contain arguments which need to be evaluated, but
9840 which are not useful to determine the result of the call. In
9841 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9842 COMPOUND_EXPR will be an argument which must be evaluated.
9843 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9844 COMPOUND_EXPR in the chain will contain the tree for the simplified
9845 form of the builtin function call. */
9847 static tree
9848 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9850 if (!validate_arg (s1, POINTER_TYPE)
9851 || !validate_arg (s2, POINTER_TYPE))
9852 return NULL_TREE;
9853 else
9855 /* If the first argument is "", the result is zero. */
9856 const char *p1 = c_getstr (s1);
9857 if (p1 && *p1 == '\0')
9859 /* Evaluate and ignore argument s2 in case it has
9860 side-effects. */
9861 return omit_one_operand_loc (loc, size_type_node,
9862 size_zero_node, s2);
9865 /* If the second argument is "", return __builtin_strlen(s1). */
9866 const char *p2 = c_getstr (s2);
9867 if (p2 && *p2 == '\0')
9869 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9871 /* If the replacement _DECL isn't initialized, don't do the
9872 transformation. */
9873 if (!fn)
9874 return NULL_TREE;
9876 return build_call_expr_loc (loc, fn, 1, s1);
9878 return NULL_TREE;
9882 /* Fold the next_arg or va_start call EXP. Returns true if an error
9883 was produced, false otherwise. This is done so that we don't output
9884 the error or warning more than once. */
9886 bool
9887 fold_builtin_next_arg (tree exp, bool va_start_p)
9889 tree fntype = TREE_TYPE (current_function_decl);
9890 int nargs = call_expr_nargs (exp);
9891 tree arg;
9892 /* There is a good chance the current input_location points inside the
9893 definition of the va_start macro (perhaps on the token for
9894 builtin) in a system header, so warnings will not be emitted.
9895 Use the location in real source code. */
9896 source_location current_location =
9897 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9898 NULL);
9900 if (!stdarg_p (fntype))
9902 error ("%<va_start%> used in function with fixed args");
9903 return true;
9906 if (va_start_p)
9908 if (va_start_p && (nargs != 2))
9910 error ("wrong number of arguments to function %<va_start%>");
9911 return true;
9913 arg = CALL_EXPR_ARG (exp, 1);
9915 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
9916 when we checked the arguments and if needed issued a warning. */
9917 else
9919 if (nargs == 0)
9921 /* Evidently an out of date version of <stdarg.h>; can't validate
9922 va_start's second argument, but can still work as intended. */
9923 warning_at (current_location,
9924 OPT_Wvarargs,
9925 "%<__builtin_next_arg%> called without an argument");
9926 return true;
9928 else if (nargs > 1)
9930 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9931 return true;
9933 arg = CALL_EXPR_ARG (exp, 0);
9936 if (TREE_CODE (arg) == SSA_NAME)
9937 arg = SSA_NAME_VAR (arg);
9939 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9940 or __builtin_next_arg (0) the first time we see it, after checking
9941 the arguments and if needed issuing a warning. */
9942 if (!integer_zerop (arg))
9944 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9946 /* Strip off all nops for the sake of the comparison. This
9947 is not quite the same as STRIP_NOPS. It does more.
9948 We must also strip off INDIRECT_EXPR for C++ reference
9949 parameters. */
9950 while (CONVERT_EXPR_P (arg)
9951 || TREE_CODE (arg) == INDIRECT_REF)
9952 arg = TREE_OPERAND (arg, 0);
9953 if (arg != last_parm)
9955 /* FIXME: Sometimes with the tree optimizers we can get something
9956 other than the last argument even though the user used the last
9957 argument. We just warn and keep going, so we may generate
9958 wrong code because of it. */
9960 warning_at (current_location,
9961 OPT_Wvarargs,
9962 "second parameter of %<va_start%> not last named argument");
9965 /* Undefined by C99 7.15.1.4p4 (va_start):
9966 "If the parameter parmN is declared with the register storage
9967 class, with a function or array type, or with a type that is
9968 not compatible with the type that results after application of
9969 the default argument promotions, the behavior is undefined."
9971 else if (DECL_REGISTER (arg))
9973 warning_at (current_location,
9974 OPT_Wvarargs,
9975 "undefined behavior when second parameter of "
9976 "%<va_start%> is declared with %<register%> storage");
9979 /* We want to verify the second parameter just once before the tree
9980 optimizers are run and then avoid keeping it in the tree,
9981 as otherwise we could warn even for correct code like:
9982 void foo (int i, ...)
9983 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9984 if (va_start_p)
9985 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9986 else
9987 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9989 return false;
9993 /* Expand a call EXP to __builtin_object_size. */
9995 static rtx
9996 expand_builtin_object_size (tree exp)
9998 tree ost;
9999 int object_size_type;
10000 tree fndecl = get_callee_fndecl (exp);
10002 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10004 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10005 exp, fndecl);
10006 expand_builtin_trap ();
10007 return const0_rtx;
10010 ost = CALL_EXPR_ARG (exp, 1);
10011 STRIP_NOPS (ost);
10013 if (TREE_CODE (ost) != INTEGER_CST
10014 || tree_int_cst_sgn (ost) < 0
10015 || compare_tree_int (ost, 3) > 0)
10017 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10018 exp, fndecl);
10019 expand_builtin_trap ();
10020 return const0_rtx;
10023 object_size_type = tree_to_shwi (ost);
10025 return object_size_type < 2 ? constm1_rtx : const0_rtx;
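/* E.g. for a pointer about which nothing could be proved by the time
   we expand, this yields the documented defaults:

     size_t max = __builtin_object_size (p, 0);   => (size_t) -1
     size_t min = __builtin_object_size (p, 2);   => 0

   matching the constm1_rtx / const0_rtx returned above.  */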
10028 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10029 FCODE is the BUILT_IN_* to use.
10030 Return NULL_RTX if we failed; the caller should emit a normal call,
10031 otherwise try to get the result in TARGET, if convenient (and in
10032 mode MODE if that's convenient). */
10034 static rtx
10035 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10036 enum built_in_function fcode)
10038 if (!validate_arglist (exp,
10039 POINTER_TYPE,
10040 fcode == BUILT_IN_MEMSET_CHK
10041 ? INTEGER_TYPE : POINTER_TYPE,
10042 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10043 return NULL_RTX;
10045 tree dest = CALL_EXPR_ARG (exp, 0);
10046 tree src = CALL_EXPR_ARG (exp, 1);
10047 tree len = CALL_EXPR_ARG (exp, 2);
10048 tree size = CALL_EXPR_ARG (exp, 3);
10050 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10051 /*str=*/NULL_TREE, size);
10053 if (!tree_fits_uhwi_p (size))
10054 return NULL_RTX;
10056 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10058 /* Avoid transforming the checking call to an ordinary one when
10059 an overflow has been detected or when the call couldn't be
10060 validated because the size is not constant. */
10061 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10062 return NULL_RTX;
10064 tree fn = NULL_TREE;
10065 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10066 mem{cpy,pcpy,move,set} is available. */
10067 switch (fcode)
10069 case BUILT_IN_MEMCPY_CHK:
10070 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10071 break;
10072 case BUILT_IN_MEMPCPY_CHK:
10073 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10074 break;
10075 case BUILT_IN_MEMMOVE_CHK:
10076 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10077 break;
10078 case BUILT_IN_MEMSET_CHK:
10079 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10080 break;
10081 default:
10082 break;
10085 if (! fn)
10086 return NULL_RTX;
10088 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10089 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10090 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10091 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10093 else if (fcode == BUILT_IN_MEMSET_CHK)
10094 return NULL_RTX;
10095 else
10097 unsigned int dest_align = get_pointer_alignment (dest);
10099 /* If DEST is not a pointer type, call the normal function. */
10100 if (dest_align == 0)
10101 return NULL_RTX;
10103 /* If SRC and DEST are the same (and not volatile), do nothing. */
10104 if (operand_equal_p (src, dest, 0))
10106 tree expr;
10108 if (fcode != BUILT_IN_MEMPCPY_CHK)
10110 /* Evaluate and ignore LEN in case it has side-effects. */
10111 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10112 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10115 expr = fold_build_pointer_plus (dest, len);
10116 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10119 /* __memmove_chk special case. */
10120 if (fcode == BUILT_IN_MEMMOVE_CHK)
10122 unsigned int src_align = get_pointer_alignment (src);
10124 if (src_align == 0)
10125 return NULL_RTX;
10127 /* If src is categorized for a readonly section we can use
10128 normal __memcpy_chk. */
10129 if (readonly_data_expr (src))
10131 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10132 if (!fn)
10133 return NULL_RTX;
10134 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10135 dest, src, len, size);
10136 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10137 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10138 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10141 return NULL_RTX;
10145 /* Emit warning if a buffer overflow is detected at compile time. */
10147 static void
10148 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10150 /* The source string. */
10151 tree srcstr = NULL_TREE;
10152 /* The size of the destination object. */
10153 tree objsize = NULL_TREE;
10154 /* The string that is being concatenated with (as in __strcat_chk)
10155 or null if it isn't. */
10156 tree catstr = NULL_TREE;
10157 /* The maximum length of the source sequence in a bounded operation
10158 (such as __strncat_chk) or null if the operation isn't bounded
10159 (such as __strcat_chk). */
10160 tree maxread = NULL_TREE;
10161 /* The exact size of the access (such as in __strncpy_chk). */
10162 tree size = NULL_TREE;
10164 switch (fcode)
10166 case BUILT_IN_STRCPY_CHK:
10167 case BUILT_IN_STPCPY_CHK:
10168 srcstr = CALL_EXPR_ARG (exp, 1);
10169 objsize = CALL_EXPR_ARG (exp, 2);
10170 break;
10172 case BUILT_IN_STRCAT_CHK:
10173 /* For __strcat_chk the warning will be emitted only if overflowing
10174 by at least strlen (dest) + 1 bytes. */
10175 catstr = CALL_EXPR_ARG (exp, 0);
10176 srcstr = CALL_EXPR_ARG (exp, 1);
10177 objsize = CALL_EXPR_ARG (exp, 2);
10178 break;
10180 case BUILT_IN_STRNCAT_CHK:
10181 catstr = CALL_EXPR_ARG (exp, 0);
10182 srcstr = CALL_EXPR_ARG (exp, 1);
10183 maxread = CALL_EXPR_ARG (exp, 2);
10184 objsize = CALL_EXPR_ARG (exp, 3);
10185 break;
10187 case BUILT_IN_STRNCPY_CHK:
10188 case BUILT_IN_STPNCPY_CHK:
10189 srcstr = CALL_EXPR_ARG (exp, 1);
10190 size = CALL_EXPR_ARG (exp, 2);
10191 objsize = CALL_EXPR_ARG (exp, 3);
10192 break;
10194 case BUILT_IN_SNPRINTF_CHK:
10195 case BUILT_IN_VSNPRINTF_CHK:
10196 maxread = CALL_EXPR_ARG (exp, 1);
10197 objsize = CALL_EXPR_ARG (exp, 3);
10198 break;
10199 default:
10200 gcc_unreachable ();
10203 if (catstr && maxread)
10205 /* Check __strncat_chk. There is no way to determine the length
10206 of the string to which the source string is being appended so
10207 just warn when the length of the source string is not known. */
10208 check_strncat_sizes (exp, objsize);
10209 return;
10212 /* The destination argument is the first one for all built-ins above. */
10213 tree dst = CALL_EXPR_ARG (exp, 0);
10215 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10218 /* Emit warning if a buffer overflow is detected at compile time
10219 in __sprintf_chk/__vsprintf_chk calls. */
10221 static void
10222 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10224 tree size, len, fmt;
10225 const char *fmt_str;
10226 int nargs = call_expr_nargs (exp);
10228 /* Verify the required arguments in the original call. */
10230 if (nargs < 4)
10231 return;
10232 size = CALL_EXPR_ARG (exp, 2);
10233 fmt = CALL_EXPR_ARG (exp, 3);
10235 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10236 return;
10238 /* Check whether the format is a literal string constant. */
10239 fmt_str = c_getstr (fmt);
10240 if (fmt_str == NULL)
10241 return;
10243 if (!init_target_chars ())
10244 return;
10246 /* If the format doesn't contain % args or %%, we know its size. */
10247 if (strchr (fmt_str, target_percent) == 0)
10248 len = build_int_cstu (size_type_node, strlen (fmt_str));
10249 /* If the format is "%s" and first ... argument is a string literal,
10250 we know it too. */
10251 else if (fcode == BUILT_IN_SPRINTF_CHK
10252 && strcmp (fmt_str, target_percent_s) == 0)
10254 tree arg;
10256 if (nargs < 5)
10257 return;
10258 arg = CALL_EXPR_ARG (exp, 4);
10259 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10260 return;
10262 len = c_strlen (arg, 1);
10263 if (!len || ! tree_fits_uhwi_p (len))
10264 return;
10266 else
10267 return;
10269 /* Add one for the terminating nul. */
10270 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10272 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10273 /*maxread=*/NULL_TREE, len, size);
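/* Worked example:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, sizeof buf, "%s", "abcde");

   Here LEN folds to 5, plus one for the terminating nul gives 6 > 4,
   so check_access diagnoses the overflow at compile time.  */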
10276 /* Emit warning if a free is called with address of a variable. */
10278 static void
10279 maybe_emit_free_warning (tree exp)
10281 tree arg = CALL_EXPR_ARG (exp, 0);
10283 STRIP_NOPS (arg);
10284 if (TREE_CODE (arg) != ADDR_EXPR)
10285 return;
10287 arg = get_base_address (TREE_OPERAND (arg, 0));
10288 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10289 return;
10291 if (SSA_VAR_P (arg))
10292 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10293 "%Kattempt to free a non-heap object %qD", exp, arg);
10294 else
10295 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10296 "%Kattempt to free a non-heap object", exp);
10299 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10300 if possible. */
10302 static tree
10303 fold_builtin_object_size (tree ptr, tree ost)
10305 unsigned HOST_WIDE_INT bytes;
10306 int object_size_type;
10308 if (!validate_arg (ptr, POINTER_TYPE)
10309 || !validate_arg (ost, INTEGER_TYPE))
10310 return NULL_TREE;
10312 STRIP_NOPS (ost);
10314 if (TREE_CODE (ost) != INTEGER_CST
10315 || tree_int_cst_sgn (ost) < 0
10316 || compare_tree_int (ost, 3) > 0)
10317 return NULL_TREE;
10319 object_size_type = tree_to_shwi (ost);
10321 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10322 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10323 and (size_t) 0 for types 2 and 3. */
10324 if (TREE_SIDE_EFFECTS (ptr))
10325 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10327 if (TREE_CODE (ptr) == ADDR_EXPR)
10329 compute_builtin_object_size (ptr, object_size_type, &bytes);
10330 if (wi::fits_to_tree_p (bytes, size_type_node))
10331 return build_int_cstu (size_type_node, bytes);
10333 else if (TREE_CODE (ptr) == SSA_NAME)
10335 /* If object size is not known yet, delay folding until
10336 later. Maybe subsequent passes will help determining
10337 it. */
10338 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10339 && wi::fits_to_tree_p (bytes, size_type_node))
10340 return build_int_cstu (size_type_node, bytes);
10343 return NULL_TREE;
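/* Worked example of the ADDR_EXPR case above:

     char buf[10];
     __builtin_object_size (&buf[4], 0)   => 6
     __builtin_object_size (&buf[4], 2)   => 6

   whereas an SSA_NAME pointer whose size is still unknown is left
   unfolded so that later passes can retry.  */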
10346 /* Builtins with folding operations that operate on "..." arguments
10347 need special handling; we need to store the arguments in a convenient
10348 data structure before attempting any folding. Fortunately there are
10349 only a few builtins that fall into this category. FNDECL is the
10350 function, and ARGS is the array of its NARGS arguments. */
10352 static tree
10353 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10355 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10356 tree ret = NULL_TREE;
10358 switch (fcode)
10360 case BUILT_IN_FPCLASSIFY:
10361 ret = fold_builtin_fpclassify (loc, args, nargs);
10362 break;
10364 default:
10365 break;
10367 if (ret)
10369 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10370 SET_EXPR_LOCATION (ret, loc);
10371 TREE_NO_WARNING (ret) = 1;
10372 return ret;
10374 return NULL_TREE;
10377 /* Initialize format string characters in the target charset. */
10379 bool
10380 init_target_chars (void)
10382 static bool init;
10383 if (!init)
10385 target_newline = lang_hooks.to_target_charset ('\n');
10386 target_percent = lang_hooks.to_target_charset ('%');
10387 target_c = lang_hooks.to_target_charset ('c');
10388 target_s = lang_hooks.to_target_charset ('s');
10389 if (target_newline == 0 || target_percent == 0 || target_c == 0
10390 || target_s == 0)
10391 return false;
10393 target_percent_c[0] = target_percent;
10394 target_percent_c[1] = target_c;
10395 target_percent_c[2] = '\0';
10397 target_percent_s[0] = target_percent;
10398 target_percent_s[1] = target_s;
10399 target_percent_s[2] = '\0';
10401 target_percent_s_newline[0] = target_percent;
10402 target_percent_s_newline[1] = target_s;
10403 target_percent_s_newline[2] = target_newline;
10404 target_percent_s_newline[3] = '\0';
10406 init = true;
10408 return true;
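/* These cached characters matter when the target execution character
   set differs from the host's (e.g. on EBCDIC targets): tests such
   as

     strchr (fmt_str, target_percent)

   elsewhere in this file must look for the target's '%' rather than
   the host's.  */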
10411 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10412 and no overflow/underflow occurred. INEXACT is true if M was not
10413 exactly calculated. TYPE is the tree type for the result. This
10414 function assumes that you cleared the MPFR flags and then
10415 calculated M, so that any flag set prior to entering this function
10416 was raised by that calculation. Return NULL_TREE if any checks fail. */
10418 static tree
10419 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10421 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10422 overflow/underflow occurred. If -frounding-math, proceed iff the
10423 result of calling FUNC was exact. */
10424 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10425 && (!flag_rounding_math || !inexact))
10427 REAL_VALUE_TYPE rr;
10429 real_from_mpfr (&rr, m, type, GMP_RNDN);
10430 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10431 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10432 but the mpfr_t is not, then we underflowed in the
10433 conversion. */
10434 if (real_isfinite (&rr)
10435 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10437 REAL_VALUE_TYPE rmode;
10439 real_convert (&rmode, TYPE_MODE (type), &rr);
10440 /* Proceed iff the specified mode can hold the value. */
10441 if (real_identical (&rmode, &rr))
10442 return build_real (type, rmode);
10445 return NULL_TREE;
10448 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10449 number and no overflow/underflow occurred. INEXACT is true if M
10450 was not exactly calculated. TYPE is the tree type for the result.
10451 This function assumes that you cleared the MPFR flags and then
10452 calculated M, so that any flag set prior to entering this
10453 function was raised by that calculation. Return NULL_TREE if any
10454 checks fail; if FORCE_CONVERT is true, bypass the checks. */
10456 static tree
10457 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10459 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10460 overflow/underflow occurred. If -frounding-math, proceed iff the
10461 result of calling FUNC was exact. */
10462 if (force_convert
10463 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10464 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10465 && (!flag_rounding_math || !inexact)))
10467 REAL_VALUE_TYPE re, im;
10469 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10470 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10471 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10472 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10473 but the mpfr_t is not, then we underflowed in the
10474 conversion. */
10475 if (force_convert
10476 || (real_isfinite (&re) && real_isfinite (&im)
10477 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10478 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10480 REAL_VALUE_TYPE re_mode, im_mode;
10482 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10483 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10484 /* Proceed iff the specified mode can hold the value. */
10485 if (force_convert
10486 || (real_identical (&re_mode, &re)
10487 && real_identical (&im_mode, &im)))
10488 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10489 build_real (TREE_TYPE (type), im_mode));
10492 return NULL_TREE;
10495 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10496 the pointer *(ARG_QUO) and return the result. The type is taken
10497 from the type of ARG0 and is used for setting the precision of the
10498 calculation and results. */
10500 static tree
10501 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10503 tree const type = TREE_TYPE (arg0);
10504 tree result = NULL_TREE;
10506 STRIP_NOPS (arg0);
10507 STRIP_NOPS (arg1);
10509 /* To proceed, MPFR must exactly represent the target floating point
10510 format, which only happens when the target base equals two. */
10511 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10512 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10513 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10515 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10516 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10518 if (real_isfinite (ra0) && real_isfinite (ra1))
10520 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10521 const int prec = fmt->p;
10522 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10523 tree result_rem;
10524 long integer_quo;
10525 mpfr_t m0, m1;
10527 mpfr_inits2 (prec, m0, m1, NULL);
10528 mpfr_from_real (m0, ra0, GMP_RNDN);
10529 mpfr_from_real (m1, ra1, GMP_RNDN);
10530 mpfr_clear_flags ();
10531 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10532 /* Remquo is independent of the rounding mode, so pass
10533 inexact=0 to do_mpfr_ckconv(). */
10534 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10535 mpfr_clears (m0, m1, NULL);
10536 if (result_rem)
10538 /* MPFR calculates quo in the host's long so it may
10539 return more bits in quo than the target int can hold
10540 if sizeof(host long) > sizeof(target int). This can
10541 happen even for native compilers in LP64 mode. In
10542 these cases, modulo the quo value with the largest
10543 number that the target int can hold while leaving one
10544 bit for the sign. */
10545 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10546 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10548 /* Dereference the quo pointer argument. */
10549 arg_quo = build_fold_indirect_ref (arg_quo);
10550 /* Proceed iff a valid pointer type was passed in. */
10551 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10553 /* Set the value. */
10554 tree result_quo
10555 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10556 build_int_cst (TREE_TYPE (arg_quo),
10557 integer_quo));
10558 TREE_SIDE_EFFECTS (result_quo) = 1;
10559 /* Combine the quo assignment with the rem. */
10560 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10561 result_quo, result_rem));
10566 return result;
10569 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10570 resulting value as a tree with type TYPE. The mpfr precision is
10571 set to the precision of TYPE. We assume that this mpfr function
10572 returns zero if the result could be calculated exactly within the
10573 requested precision. In addition, the integer pointer represented
10574 by ARG_SG will be dereferenced and set to the appropriate signgam
10575 (-1,1) value. */
10577 static tree
10578 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10580 tree result = NULL_TREE;
10582 STRIP_NOPS (arg);
10584 /* To proceed, MPFR must exactly represent the target floating point
10585 format, which only happens when the target base equals two. Also
10586 verify ARG is a constant and that ARG_SG is an int pointer. */
10587 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10588 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10589 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10590 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10592 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10594 /* In addition to NaN and Inf, the argument cannot be zero or a
10595 negative integer. */
10596 if (real_isfinite (ra)
10597 && ra->cl != rvc_zero
10598 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10600 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10601 const int prec = fmt->p;
10602 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10603 int inexact, sg;
10604 mpfr_t m;
10605 tree result_lg;
10607 mpfr_init2 (m, prec);
10608 mpfr_from_real (m, ra, GMP_RNDN);
10609 mpfr_clear_flags ();
10610 inexact = mpfr_lgamma (m, &sg, m, rnd);
10611 result_lg = do_mpfr_ckconv (m, type, inexact);
10612 mpfr_clear (m);
10613 if (result_lg)
10615 tree result_sg;
10617 /* Dereference the arg_sg pointer argument. */
10618 arg_sg = build_fold_indirect_ref (arg_sg);
10619 /* Assign the signgam value into *arg_sg. */
10620 result_sg = fold_build2 (MODIFY_EXPR,
10621 TREE_TYPE (arg_sg), arg_sg,
10622 build_int_cst (TREE_TYPE (arg_sg), sg));
10623 TREE_SIDE_EFFECTS (result_sg) = 1;
10624 /* Combine the signgam assignment with the lgamma result. */
10625 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10626 result_sg, result_lg));
10631 return result;
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
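
/* Illustrative sketch (editor's note, not part of the original source):
   callers pass a two-operand mpc entry point such as mpc_pow as FUNC,
   so that, for example, a constant call like

       _Complex double z = __builtin_cpow (1.0 + 1.0i, 2.0 + 0.0i);

   can be evaluated to a COMPLEX_CST at compile time; (1+i)^2 == 2i.  */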
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
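
/* Illustrative note (editor's note, not part of the original source):
   for a normal builtin this wrapper defers to fold_builtin_n, so a
   GIMPLE call such as __builtin_sqrt (4.0) can come back as the
   REAL_CST 2.0, with the call's location copied onto the replacement
   so later diagnostics still point at the original statement.  */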
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
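
/* Illustrative sketch (editor's note, not part of the original source):
   this hook is what makes a user redirection such as

       extern int ffs (int) __asm__ ("__my_ffs");

   take effect for the builtin as well; the BUILT_IN_FFS special case
   additionally retargets the ffs optab libfunc, since expansion may
   fall back to a library call when INT_TYPE_SIZE < BITS_PER_WORD.  */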
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
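
/* Illustrative note (editor's note, not part of the original source):
   these two predicates are cheap classification helpers; a caller such
   as an inlining or loop-unrolling cost model can treat, say, a call
   to __builtin_popcount (x) as roughly one instruction rather than as
   a full call, because it appears in the list above.  */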
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
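
/* Illustrative note (editor's note, not part of the original source):
   string-builtin folders use this guard before reasoning about a
   character argument; e.g. strchr (s, 'a') only folds when the
   constant 'a' survives the target-char to host-char round trip, and
   folding is skipped on a cross compiler whose target char width
   differs from the host's.  */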
/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
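
/* Illustrative note (editor's note, not part of the original source):
   on a typical 64-bit target ptrdiff_type_node is a 64-bit signed
   type, so this returns PTRDIFF_MAX == 2^63 - 1; object-size checks
   such as those behind __builtin_object_size treat anything at or
   above this bound as invalid.  */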