/* Expand builtin functions.
   Copyright (C) 1988-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode,
				       tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}

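/* For example (illustrative only): "__builtin_memcpy",
   "__sync_fetch_and_add" and "__atomic_load_n" are all accepted by
   is_builtin_name, whereas a plain "memcpy" is not.  */
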
/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

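/* Illustrative sketch of the align/bitpos encoding (not from the original
   source): if an object is known to live at a byte address of the form
   8*K + 2, get_object_alignment_1 stores 64 to *ALIGNP and 16 to *BITPOSP
   (both in bits), and get_object_alignment above then reduces this to
   least_bit_hwi (16) == 16, i.e. a guaranteed 2-byte alignment.  */
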
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

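/* Example (illustrative): with ELTSIZE == 1, string_length ("ab\0cd", 1, 5)
   returns 2; when no terminator falls in range, as in
   string_length ("abcd", 1, 4), the result is MAXELTS, here 4.  */
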
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array.  */
  HOST_WIDE_INT maxelts = TREE_STRING_LENGTH (src);
  tree type = TREE_TYPE (src);
  if (tree size = TYPE_SIZE_UNIT (type))
    if (tree_fits_shwi_p (size))
      maxelts = tree_to_uhwi (size);

  maxelts = maxelts / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	/* Return when an embedded null character is found.  */
	return NULL_TREE;

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */
      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}

      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

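/* Examples (illustrative, assuming SRC folds to a STRING_CST): for
   c_strlen of "foobar" the result is ssize_int (6); for "foobar" + 4 it
   is ssize_int (2); for "foo\0bar" with a non-constant byte offset it is
   NULL_TREE, because the embedded null makes the length depend on where
   the search starts.  */
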
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

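/* Example (illustrative): on a little-endian target with 32-bit SImode,
   c_readstr ("abcd", SImode) yields the constant 0x64636261; on a
   big-endian target it yields 0x61626364.  Bytes past a '\0' read as
   zero, because CH sticks at 0 once a null byte has been seen.  */
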
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

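/* Example (illustrative): with 8-bit target chars, target_char_cast of
   the INTEGER_CST 65 stores 'A' in *P and returns 0.  Failure (return 1)
   arises for a non-INTEGER_CST argument, or when the target char is wider
   than the host char and the value does not survive the round trip.  */
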
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

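/* For reference (illustrative): a user-level __builtin_return_address (0)
   reaches this code with COUNT == 0, and __builtin_frame_address (1)
   with COUNT == 1, the latter walking one link of the dynamic chain in
   the loop above.  */
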
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

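/* Sketch of the resulting buffer layout (illustrative, in Pmode words):
   word 0 holds the frame value from targetm.builtin_setjmp_frame_value (),
   word 1 holds the address of RECEIVER_LABEL, and the area starting at
   offset 2 * GET_MODE_SIZE (Pmode) holds the machine-dependent stack
   save data written by emit_stack_save.  */
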
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[]
	= ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to the current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

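/* Usage example (illustrative): a two-pointer builtin such as
   __builtin_nonlocal_goto below is checked with

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);

   where the trailing VOID_TYPE demands that no further arguments follow;
   a trailing 0 instead would have allowed extra, unchecked arguments.  */
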
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>;"
	       " using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

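/* User-level example (illustrative): a call such as

     __builtin_prefetch (p, 0, 3);

   requests a read prefetch of *p with maximum temporal locality; both the
   read/write flag and the locality must be integer constants, as enforced
   above.  */
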
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}

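/* Sketch of the block layout computed above (illustrative): first the
   incoming arg-pointer, then (unless passed invisibly) the structure
   value address, then each argument-passing register in turn, each
   placed at an offset rounded up to its mode's natural alignment.  */
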
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value
    = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

1584 /* Perform an untyped call and save the state required to perform an
1585 untyped return of whatever value was returned by the given function. */
1587 static rtx
1588 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1590 int size, align, regno;
1591 fixed_size_mode mode;
1592 rtx incoming_args, result, reg, dest, src;
1593 rtx_call_insn *call_insn;
1594 rtx old_stack_level = 0;
1595 rtx call_fusage = 0;
1596 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1598 arguments = convert_memory_address (Pmode, arguments);
1600 /* Create a block where the return registers can be saved. */
1601 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1603 /* Fetch the arg pointer from the ARGUMENTS block. */
1604 incoming_args = gen_reg_rtx (Pmode);
1605 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1606 if (!STACK_GROWS_DOWNWARD)
1607 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1608 incoming_args, 0, OPTAB_LIB_WIDEN);
1610 /* Push a new argument block and copy the arguments. Do not allow
1611 the (potential) memcpy call below to interfere with our stack
1612 manipulations. */
1613 do_pending_stack_adjust ();
1614 NO_DEFER_POP;
1616 /* Save the stack with nonlocal if available. */
1617 if (targetm.have_save_stack_nonlocal ())
1618 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1619 else
1620 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1622 /* Allocate a block of memory onto the stack and copy the memory
1623 arguments to the outgoing arguments address. We can pass TRUE
1624 as the 4th argument because we just saved the stack pointer
1625 and will restore it right after the call. */
1626 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1628 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1629 may have already set current_function_calls_alloca to true.
1630 current_function_calls_alloca won't be set if argsize is zero,
1631 so we have to guarantee need_drap is true here. */
1632 if (SUPPORTS_STACK_ALIGNMENT)
1633 crtl->need_drap = true;
1635 dest = virtual_outgoing_args_rtx;
1636 if (!STACK_GROWS_DOWNWARD)
1638 if (CONST_INT_P (argsize))
1639 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1640 else
1641 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1643 dest = gen_rtx_MEM (BLKmode, dest);
1644 set_mem_align (dest, PARM_BOUNDARY);
1645 src = gen_rtx_MEM (BLKmode, incoming_args);
1646 set_mem_align (src, PARM_BOUNDARY);
1647 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1649 /* Refer to the argument block. */
1650 apply_args_size ();
1651 arguments = gen_rtx_MEM (BLKmode, arguments);
1652 set_mem_align (arguments, PARM_BOUNDARY);
1654 /* Walk past the arg-pointer and structure value address. */
1655 size = GET_MODE_SIZE (Pmode);
1656 if (struct_value)
1657 size += GET_MODE_SIZE (Pmode);
1659 /* Restore each of the registers previously saved. Make USE insns
1660 for each of these registers for use in making the call. */
1661 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1662 if ((mode = apply_args_mode[regno]) != VOIDmode)
1664 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1665 if (size % align != 0)
1666 size = CEIL (size, align) * align;
1667 reg = gen_rtx_REG (mode, regno);
1668 emit_move_insn (reg, adjust_address (arguments, mode, size));
1669 use_reg (&call_fusage, reg);
1670 size += GET_MODE_SIZE (mode);
1673 /* Restore the structure value address unless this is passed as an
1674 "invisible" first argument. */
1675 size = GET_MODE_SIZE (Pmode);
1676 if (struct_value)
1678 rtx value = gen_reg_rtx (Pmode);
1679 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1680 emit_move_insn (struct_value, value);
1681 if (REG_P (struct_value))
1682 use_reg (&call_fusage, struct_value);
1683 size += GET_MODE_SIZE (Pmode);
1686 /* All arguments and registers used for the call are set up by now! */
1687 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1689 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
1690 needs to be done, and we don't want to load it into a register as an
1691 optimization, because prepare_call_address already did so if appropriate. */
1692 if (GET_CODE (function) != SYMBOL_REF)
1693 function = memory_address (FUNCTION_MODE, function);
1695 /* Generate the actual call instruction and save the return value. */
1696 if (targetm.have_untyped_call ())
1698 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1699 emit_call_insn (targetm.gen_untyped_call (mem, result,
1700 result_vector (1, result)));
1702 else if (targetm.have_call_value ())
1704 rtx valreg = 0;
1706 /* Locate the unique return register. It is not possible to
1707 express a call that sets more than one return register using
1708 call_value; use untyped_call for that. In fact, untyped_call
1709 only needs to save the return registers in the given block. */
1710 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1711 if ((mode = apply_result_mode[regno]) != VOIDmode)
1713 gcc_assert (!valreg); /* have_untyped_call required. */
1715 valreg = gen_rtx_REG (mode, regno);
1718 emit_insn (targetm.gen_call_value (valreg,
1719 gen_rtx_MEM (FUNCTION_MODE, function),
1720 const0_rtx, NULL_RTX, const0_rtx));
1722 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1724 else
1725 gcc_unreachable ();
1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
1732 /* Restore the stack. */
1733 if (targetm.have_save_stack_nonlocal ())
1734 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1735 else
1736 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1737 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1739 OK_DEFER_POP;
1741 /* Return the address of the result block. */
1742 result = copy_addr_to_reg (XEXP (result, 0));
1743 return convert_memory_address (ptr_mode, result);
1746 /* Perform an untyped return. */
1748 static void
1749 expand_builtin_return (rtx result)
1751 int size, align, regno;
1752 fixed_size_mode mode;
1753 rtx reg;
1754 rtx_insn *call_fusage = 0;
1756 result = convert_memory_address (Pmode, result);
1758 apply_result_size ();
1759 result = gen_rtx_MEM (BLKmode, result);
1761 if (targetm.have_untyped_return ())
1763 rtx vector = result_vector (0, result);
1764 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1765 emit_barrier ();
1766 return;
1769 /* Restore the return value and note that each value is used. */
1770 size = 0;
1771 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1772 if ((mode = apply_result_mode[regno]) != VOIDmode)
1774 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1775 if (size % align != 0)
1776 size = CEIL (size, align) * align;
1777 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1778 emit_move_insn (reg, adjust_address (result, mode, size));
1780 push_to_sequence (call_fusage);
1781 emit_use (reg);
1782 call_fusage = get_insns ();
1783 end_sequence ();
1784 size += GET_MODE_SIZE (mode);
1787 /* Put the USE insns before the return. */
1788 emit_insn (call_fusage);
1790 /* Return whatever values were restored by jumping directly to the end
1791 of the function. */
1792 expand_naked_return ();
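/* Editorial sketch, not part of the original source: the two expanders
   above implement GCC's __builtin_apply/__builtin_return extension. A
   typical (hypothetical) use is a forwarding wrapper:

     void wrapper (int a, int b)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) wrapped_fn, args, 64);
       __builtin_return (ret);
     }

   Here wrapped_fn is an assumed function with a compatible signature
   and 64 is a caller-chosen upper bound on the size in bytes of the
   stack argument data.  */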
1795 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1797 static enum type_class
1798 type_to_class (tree type)
1800 switch (TREE_CODE (type))
1802 case VOID_TYPE: return void_type_class;
1803 case INTEGER_TYPE: return integer_type_class;
1804 case ENUMERAL_TYPE: return enumeral_type_class;
1805 case BOOLEAN_TYPE: return boolean_type_class;
1806 case POINTER_TYPE: return pointer_type_class;
1807 case REFERENCE_TYPE: return reference_type_class;
1808 case OFFSET_TYPE: return offset_type_class;
1809 case REAL_TYPE: return real_type_class;
1810 case COMPLEX_TYPE: return complex_type_class;
1811 case FUNCTION_TYPE: return function_type_class;
1812 case METHOD_TYPE: return method_type_class;
1813 case RECORD_TYPE: return record_type_class;
1814 case UNION_TYPE:
1815 case QUAL_UNION_TYPE: return union_type_class;
1816 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1817 ? string_type_class : array_type_class);
1818 case LANG_TYPE: return lang_type_class;
1819 default: return no_type_class;
1823 /* Expand a call EXP to __builtin_classify_type. */
1825 static rtx
1826 expand_builtin_classify_type (tree exp)
1828 if (call_expr_nargs (exp))
1829 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1830 return GEN_INT (no_type_class);
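/* Illustrative example (editorial addition): for a call such as
   __builtin_classify_type (3.14) the argument type is double, so the
   call expands to the constant real_type_class; a call with no
   arguments expands to no_type_class.  */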
1833 /* This helper macro, meant to be used in mathfn_built_in below, determines
1834 which among a set of builtin math functions is appropriate for a given type
1835 mode. The `F' (float) and `L' (long double) variants are automatically generated
1836 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1837 types, there are additional types that are considered with 'F32', 'F64',
1838 'F128', etc. suffixes. */
1839 #define CASE_MATHFN(MATHFN) \
1840 CASE_CFN_##MATHFN: \
1841 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1842 fcodel = BUILT_IN_##MATHFN##L ; break;
1843 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1844 types. */
1845 #define CASE_MATHFN_FLOATN(MATHFN) \
1846 CASE_CFN_##MATHFN: \
1847 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1848 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1849 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1850 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1851 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1852 break;
1853 /* Similar to above, but appends _R after any F/L suffix. */
1854 #define CASE_MATHFN_REENT(MATHFN) \
1855 case CFN_BUILT_IN_##MATHFN##_R: \
1856 case CFN_BUILT_IN_##MATHFN##F_R: \
1857 case CFN_BUILT_IN_##MATHFN##L_R: \
1858 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1859 fcodel = BUILT_IN_##MATHFN##L_R ; break;
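/* Illustrative expansion (editorial note): CASE_MATHFN (ACOS) produces
   the CFN_ACOS case labels and sets
     fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
     fcodel = BUILT_IN_ACOSL;
   leaving the _Float<N> codes at END_BUILTINS, whereas the
   CASE_MATHFN_FLOATN variant also fills in the F16/F32/.../F128X
   codes.  */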
1861 /* Return a function equivalent to FN but operating on floating-point
1862 values of type TYPE, or END_BUILTINS if no such function exists.
1863 This is purely an operation on function codes; it does not guarantee
1864 that the target actually has an implementation of the function. */
1866 static built_in_function
1867 mathfn_built_in_2 (tree type, combined_fn fn)
1869 tree mtype;
1870 built_in_function fcode, fcodef, fcodel;
1871 built_in_function fcodef16 = END_BUILTINS;
1872 built_in_function fcodef32 = END_BUILTINS;
1873 built_in_function fcodef64 = END_BUILTINS;
1874 built_in_function fcodef128 = END_BUILTINS;
1875 built_in_function fcodef32x = END_BUILTINS;
1876 built_in_function fcodef64x = END_BUILTINS;
1877 built_in_function fcodef128x = END_BUILTINS;
1879 switch (fn)
1881 CASE_MATHFN (ACOS)
1882 CASE_MATHFN (ACOSH)
1883 CASE_MATHFN (ASIN)
1884 CASE_MATHFN (ASINH)
1885 CASE_MATHFN (ATAN)
1886 CASE_MATHFN (ATAN2)
1887 CASE_MATHFN (ATANH)
1888 CASE_MATHFN (CBRT)
1889 CASE_MATHFN_FLOATN (CEIL)
1890 CASE_MATHFN (CEXPI)
1891 CASE_MATHFN_FLOATN (COPYSIGN)
1892 CASE_MATHFN (COS)
1893 CASE_MATHFN (COSH)
1894 CASE_MATHFN (DREM)
1895 CASE_MATHFN (ERF)
1896 CASE_MATHFN (ERFC)
1897 CASE_MATHFN (EXP)
1898 CASE_MATHFN (EXP10)
1899 CASE_MATHFN (EXP2)
1900 CASE_MATHFN (EXPM1)
1901 CASE_MATHFN (FABS)
1902 CASE_MATHFN (FDIM)
1903 CASE_MATHFN_FLOATN (FLOOR)
1904 CASE_MATHFN_FLOATN (FMA)
1905 CASE_MATHFN_FLOATN (FMAX)
1906 CASE_MATHFN_FLOATN (FMIN)
1907 CASE_MATHFN (FMOD)
1908 CASE_MATHFN (FREXP)
1909 CASE_MATHFN (GAMMA)
1910 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1911 CASE_MATHFN (HUGE_VAL)
1912 CASE_MATHFN (HYPOT)
1913 CASE_MATHFN (ILOGB)
1914 CASE_MATHFN (ICEIL)
1915 CASE_MATHFN (IFLOOR)
1916 CASE_MATHFN (INF)
1917 CASE_MATHFN (IRINT)
1918 CASE_MATHFN (IROUND)
1919 CASE_MATHFN (ISINF)
1920 CASE_MATHFN (J0)
1921 CASE_MATHFN (J1)
1922 CASE_MATHFN (JN)
1923 CASE_MATHFN (LCEIL)
1924 CASE_MATHFN (LDEXP)
1925 CASE_MATHFN (LFLOOR)
1926 CASE_MATHFN (LGAMMA)
1927 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1928 CASE_MATHFN (LLCEIL)
1929 CASE_MATHFN (LLFLOOR)
1930 CASE_MATHFN (LLRINT)
1931 CASE_MATHFN (LLROUND)
1932 CASE_MATHFN (LOG)
1933 CASE_MATHFN (LOG10)
1934 CASE_MATHFN (LOG1P)
1935 CASE_MATHFN (LOG2)
1936 CASE_MATHFN (LOGB)
1937 CASE_MATHFN (LRINT)
1938 CASE_MATHFN (LROUND)
1939 CASE_MATHFN (MODF)
1940 CASE_MATHFN (NAN)
1941 CASE_MATHFN (NANS)
1942 CASE_MATHFN_FLOATN (NEARBYINT)
1943 CASE_MATHFN (NEXTAFTER)
1944 CASE_MATHFN (NEXTTOWARD)
1945 CASE_MATHFN (POW)
1946 CASE_MATHFN (POWI)
1947 CASE_MATHFN (POW10)
1948 CASE_MATHFN (REMAINDER)
1949 CASE_MATHFN (REMQUO)
1950 CASE_MATHFN_FLOATN (RINT)
1951 CASE_MATHFN_FLOATN (ROUND)
1952 CASE_MATHFN (SCALB)
1953 CASE_MATHFN (SCALBLN)
1954 CASE_MATHFN (SCALBN)
1955 CASE_MATHFN (SIGNBIT)
1956 CASE_MATHFN (SIGNIFICAND)
1957 CASE_MATHFN (SIN)
1958 CASE_MATHFN (SINCOS)
1959 CASE_MATHFN (SINH)
1960 CASE_MATHFN_FLOATN (SQRT)
1961 CASE_MATHFN (TAN)
1962 CASE_MATHFN (TANH)
1963 CASE_MATHFN (TGAMMA)
1964 CASE_MATHFN_FLOATN (TRUNC)
1965 CASE_MATHFN (Y0)
1966 CASE_MATHFN (Y1)
1967 CASE_MATHFN (YN)
1969 default:
1970 return END_BUILTINS;
1973 mtype = TYPE_MAIN_VARIANT (type);
1974 if (mtype == double_type_node)
1975 return fcode;
1976 else if (mtype == float_type_node)
1977 return fcodef;
1978 else if (mtype == long_double_type_node)
1979 return fcodel;
1980 else if (mtype == float16_type_node)
1981 return fcodef16;
1982 else if (mtype == float32_type_node)
1983 return fcodef32;
1984 else if (mtype == float64_type_node)
1985 return fcodef64;
1986 else if (mtype == float128_type_node)
1987 return fcodef128;
1988 else if (mtype == float32x_type_node)
1989 return fcodef32x;
1990 else if (mtype == float64x_type_node)
1991 return fcodef64x;
1992 else if (mtype == float128x_type_node)
1993 return fcodef128x;
1994 else
1995 return END_BUILTINS;
1998 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1999 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2000 otherwise use the explicit declaration. If we can't do the conversion,
2001 return null. */
2003 static tree
2004 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2006 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2007 if (fcode2 == END_BUILTINS)
2008 return NULL_TREE;
2010 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2011 return NULL_TREE;
2013 return builtin_decl_explicit (fcode2);
2016 /* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
2018 tree
2019 mathfn_built_in (tree type, combined_fn fn)
2021 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2024 /* Like mathfn_built_in_1, but take a built_in_function and
2025 always use the implicit builtin declarations. */
2027 tree
2028 mathfn_built_in (tree type, enum built_in_function fn)
2030 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
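/* Usage example (editorial, hedged): mathfn_built_in (float_type_node,
   BUILT_IN_SIN) maps BUILT_IN_SIN to BUILT_IN_SINF and returns the
   declaration of sinf when the implicit builtin is available, or
   NULL_TREE otherwise.  */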
2033 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2034 return its code, otherwise return IFN_LAST. Note that this function
2035 only tests whether the function is defined in internals.def, not whether
2036 it is actually available on the target. */
2038 internal_fn
2039 associated_internal_fn (tree fndecl)
2041 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2042 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2043 switch (DECL_FUNCTION_CODE (fndecl))
2045 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2046 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2047 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2048 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2049 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2050 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2051 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2052 #include "internal-fn.def"
2054 CASE_FLT_FN (BUILT_IN_POW10):
2055 return IFN_EXP10;
2057 CASE_FLT_FN (BUILT_IN_DREM):
2058 return IFN_REMAINDER;
2060 CASE_FLT_FN (BUILT_IN_SCALBN):
2061 CASE_FLT_FN (BUILT_IN_SCALBLN):
2062 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2063 return IFN_LDEXP;
2064 return IFN_LAST;
2066 default:
2067 return IFN_LAST;
2071 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2072 on the current target by a call to an internal function, return the
2073 code of that internal function, otherwise return IFN_LAST. The caller
2074 is responsible for ensuring that any side-effects of the built-in
2075 call are dealt with correctly. E.g. if CALL sets errno, the caller
2076 must decide that the errno result isn't needed or make it available
2077 in some other way. */
2079 internal_fn
2080 replacement_internal_fn (gcall *call)
2082 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2084 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2085 if (ifn != IFN_LAST)
2087 tree_pair types = direct_internal_fn_types (ifn, call);
2088 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2089 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2090 return ifn;
2093 return IFN_LAST;
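/* Editorial example: for a GIMPLE call
     x = __builtin_sqrtf (y);
   associated_internal_fn returns IFN_SQRT, and replacement_internal_fn
   also returns IFN_SQRT provided the target supports that internal
   function directly for SFmode in the optimization mode of the call's
   basic block; otherwise it returns IFN_LAST.  */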
2096 /* Expand a call to the builtin ternary math functions (fma).
2097 Return NULL_RTX if a normal call should be emitted rather than expanding the
2098 function in-line. EXP is the expression that is a call to the builtin
2099 function; if convenient, the result should be placed in TARGET.
2100 SUBTARGET may be used as the target for computing one of EXP's
2101 operands. */
2103 static rtx
2104 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2106 optab builtin_optab;
2107 rtx op0, op1, op2, result;
2108 rtx_insn *insns;
2109 tree fndecl = get_callee_fndecl (exp);
2110 tree arg0, arg1, arg2;
2111 machine_mode mode;
2113 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2114 return NULL_RTX;
2116 arg0 = CALL_EXPR_ARG (exp, 0);
2117 arg1 = CALL_EXPR_ARG (exp, 1);
2118 arg2 = CALL_EXPR_ARG (exp, 2);
2120 switch (DECL_FUNCTION_CODE (fndecl))
2122 CASE_FLT_FN (BUILT_IN_FMA):
2123 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2124 builtin_optab = fma_optab; break;
2125 default:
2126 gcc_unreachable ();
2129 /* Make a suitable register to place result in. */
2130 mode = TYPE_MODE (TREE_TYPE (exp));
2132 /* Before working hard, check whether the instruction is available. */
2133 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2134 return NULL_RTX;
2136 result = gen_reg_rtx (mode);
2138 /* Always stabilize the argument list. */
2139 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2140 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2141 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2143 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2144 op1 = expand_normal (arg1);
2145 op2 = expand_normal (arg2);
2147 start_sequence ();
2149 /* Compute into RESULT.
2150 Set RESULT to wherever the result comes back. */
2151 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2152 result, 0);
2154 /* If we were unable to expand via the builtin, stop the sequence
2155 (without outputting the insns) and call the library function
2156 with the stabilized argument list. */
2157 if (result == 0)
2159 end_sequence ();
2160 return expand_call (exp, target, target == const0_rtx);
2163 /* Output the entire sequence. */
2164 insns = get_insns ();
2165 end_sequence ();
2166 emit_insn (insns);
2168 return result;
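/* Editorial sketch of the effect: on a target providing a fused
   multiply-add insn (fma_optab), a call such as
     double r = __builtin_fma (a, b, c);
   expands to that single insn computing a * b + c with one rounding;
   without the optab the code above falls back to a library call.  */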
2171 /* Expand a call to the builtin sin and cos math functions.
2172 Return NULL_RTX if a normal call should be emitted rather than expanding the
2173 function in-line. EXP is the expression that is a call to the builtin
2174 function; if convenient, the result should be placed in TARGET.
2175 SUBTARGET may be used as the target for computing one of EXP's
2176 operands. */
2178 static rtx
2179 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2181 optab builtin_optab;
2182 rtx op0;
2183 rtx_insn *insns;
2184 tree fndecl = get_callee_fndecl (exp);
2185 machine_mode mode;
2186 tree arg;
2188 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2189 return NULL_RTX;
2191 arg = CALL_EXPR_ARG (exp, 0);
2193 switch (DECL_FUNCTION_CODE (fndecl))
2195 CASE_FLT_FN (BUILT_IN_SIN):
2196 CASE_FLT_FN (BUILT_IN_COS):
2197 builtin_optab = sincos_optab; break;
2198 default:
2199 gcc_unreachable ();
2202 /* Make a suitable register to place result in. */
2203 mode = TYPE_MODE (TREE_TYPE (exp));
2205 /* Check if the sincos insn is available; otherwise fall back
2206 to the sin or cos insn. */
2207 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2208 switch (DECL_FUNCTION_CODE (fndecl))
2210 CASE_FLT_FN (BUILT_IN_SIN):
2211 builtin_optab = sin_optab; break;
2212 CASE_FLT_FN (BUILT_IN_COS):
2213 builtin_optab = cos_optab; break;
2214 default:
2215 gcc_unreachable ();
2218 /* Before working hard, check whether the instruction is available. */
2219 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2221 rtx result = gen_reg_rtx (mode);
2223 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2224 need to expand the argument again. This way, we will not perform
2225 side-effects more than once. */
2226 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2228 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2230 start_sequence ();
2232 /* Compute into RESULT.
2233 Set RESULT to wherever the result comes back. */
2234 if (builtin_optab == sincos_optab)
2236 int ok;
2238 switch (DECL_FUNCTION_CODE (fndecl))
2240 CASE_FLT_FN (BUILT_IN_SIN):
2241 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2242 break;
2243 CASE_FLT_FN (BUILT_IN_COS):
2244 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2245 break;
2246 default:
2247 gcc_unreachable ();
2249 gcc_assert (ok);
2251 else
2252 result = expand_unop (mode, builtin_optab, op0, result, 0);
2254 if (result != 0)
2256 /* Output the entire sequence. */
2257 insns = get_insns ();
2258 end_sequence ();
2259 emit_insn (insns);
2260 return result;
2263 /* If we were unable to expand via the builtin, stop the sequence
2264 (without outputting the insns) and call the library function
2265 with the stabilized argument list. */
2266 end_sequence ();
2269 return expand_call (exp, target, target == const0_rtx);
2272 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2273 return an RTL instruction code that implements the functionality.
2274 If that isn't possible or available, return CODE_FOR_nothing. */
2276 static enum insn_code
2277 interclass_mathfn_icode (tree arg, tree fndecl)
2279 bool errno_set = false;
2280 optab builtin_optab = unknown_optab;
2281 machine_mode mode;
2283 switch (DECL_FUNCTION_CODE (fndecl))
2285 CASE_FLT_FN (BUILT_IN_ILOGB):
2286 errno_set = true; builtin_optab = ilogb_optab; break;
2287 CASE_FLT_FN (BUILT_IN_ISINF):
2288 builtin_optab = isinf_optab; break;
2289 case BUILT_IN_ISNORMAL:
2290 case BUILT_IN_ISFINITE:
2291 CASE_FLT_FN (BUILT_IN_FINITE):
2292 case BUILT_IN_FINITED32:
2293 case BUILT_IN_FINITED64:
2294 case BUILT_IN_FINITED128:
2295 case BUILT_IN_ISINFD32:
2296 case BUILT_IN_ISINFD64:
2297 case BUILT_IN_ISINFD128:
2298 /* These builtins have no optabs (yet). */
2299 break;
2300 default:
2301 gcc_unreachable ();
2304 /* There's no easy way to detect the case we need to set EDOM. */
2305 if (flag_errno_math && errno_set)
2306 return CODE_FOR_nothing;
2308 /* Optab mode depends on the mode of the input argument. */
2309 mode = TYPE_MODE (TREE_TYPE (arg));
2311 if (builtin_optab)
2312 return optab_handler (builtin_optab, mode);
2313 return CODE_FOR_nothing;
2316 /* Expand a call to one of the builtin math functions that operate on
2317 a floating point argument and output an integer result (ilogb, isinf,
2318 isnan, etc.).
2319 Return 0 if a normal call should be emitted rather than expanding the
2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET. */
2323 static rtx
2324 expand_builtin_interclass_mathfn (tree exp, rtx target)
2326 enum insn_code icode = CODE_FOR_nothing;
2327 rtx op0;
2328 tree fndecl = get_callee_fndecl (exp);
2329 machine_mode mode;
2330 tree arg;
2332 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2333 return NULL_RTX;
2335 arg = CALL_EXPR_ARG (exp, 0);
2336 icode = interclass_mathfn_icode (arg, fndecl);
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2339 if (icode != CODE_FOR_nothing)
2341 struct expand_operand ops[1];
2342 rtx_insn *last = get_last_insn ();
2343 tree orig_arg = arg;
2345 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2346 need to expand the argument again. This way, we will not perform
2347 side-effects more than once. */
2348 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2350 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2352 if (mode != GET_MODE (op0))
2353 op0 = convert_to_mode (mode, op0, 0);
2355 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2356 if (maybe_legitimize_operands (icode, 0, 1, ops)
2357 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2358 return ops[0].value;
2360 delete_insns_since (last);
2361 CALL_EXPR_ARG (exp, 0) = orig_arg;
2364 return NULL_RTX;
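/* Illustrative example (editorial): __builtin_isinf (x) expands through
   isinf_optab when the target defines it; otherwise NULL_RTX is
   returned here and the caller emits an ordinary library call.  */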
2367 /* Expand a call to the builtin sincos math function.
2368 Return NULL_RTX if a normal call should be emitted rather than expanding the
2369 function in-line. EXP is the expression that is a call to the builtin
2370 function. */
2372 static rtx
2373 expand_builtin_sincos (tree exp)
2375 rtx op0, op1, op2, target1, target2;
2376 machine_mode mode;
2377 tree arg, sinp, cosp;
2378 int result;
2379 location_t loc = EXPR_LOCATION (exp);
2380 tree alias_type, alias_off;
2382 if (!validate_arglist (exp, REAL_TYPE,
2383 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2384 return NULL_RTX;
2386 arg = CALL_EXPR_ARG (exp, 0);
2387 sinp = CALL_EXPR_ARG (exp, 1);
2388 cosp = CALL_EXPR_ARG (exp, 2);
2390 /* Make a suitable register to place result in. */
2391 mode = TYPE_MODE (TREE_TYPE (arg));
2393 /* Check if sincos insn is available, otherwise emit the call. */
2394 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2395 return NULL_RTX;
2397 target1 = gen_reg_rtx (mode);
2398 target2 = gen_reg_rtx (mode);
2400 op0 = expand_normal (arg);
2401 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2402 alias_off = build_int_cst (alias_type, 0);
2403 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2404 sinp, alias_off));
2405 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2406 cosp, alias_off));
2408 /* Compute into target1 and target2.
2409 Set TARGET to wherever the result comes back. */
2410 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2411 gcc_assert (result);
2413 /* Move target1 and target2 to the memory locations indicated
2414 by op1 and op2. */
2415 emit_move_insn (op1, target1);
2416 emit_move_insn (op2, target2);
2418 return const0_rtx;
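/* Editorial example: for
     double s, c;
     sincos (x, &s, &c);
   a target providing sincos_optab computes both results with a single
   insn, and the two stores above move them into *sinp and *cosp; the
   call then expands to const0_rtx since sincos returns void.  */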
2421 /* Expand a call to the internal cexpi builtin to the sincos math function.
2422 EXP is the expression that is a call to the builtin function; if convenient,
2423 the result should be placed in TARGET. */
2425 static rtx
2426 expand_builtin_cexpi (tree exp, rtx target)
2428 tree fndecl = get_callee_fndecl (exp);
2429 tree arg, type;
2430 machine_mode mode;
2431 rtx op0, op1, op2;
2432 location_t loc = EXPR_LOCATION (exp);
2434 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2435 return NULL_RTX;
2437 arg = CALL_EXPR_ARG (exp, 0);
2438 type = TREE_TYPE (arg);
2439 mode = TYPE_MODE (TREE_TYPE (arg));
2441 /* Try expanding via a sincos optab; fall back to emitting a libcall
2442 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2443 is only generated from sincos or cexp, or when either of them is available. */
2444 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2446 op1 = gen_reg_rtx (mode);
2447 op2 = gen_reg_rtx (mode);
2449 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2451 /* Compute into op1 and op2. */
2452 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2454 else if (targetm.libc_has_function (function_sincos))
2456 tree call, fn = NULL_TREE;
2457 tree top1, top2;
2458 rtx op1a, op2a;
2460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2461 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2462 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2463 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2464 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2465 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2466 else
2467 gcc_unreachable ();
2469 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2470 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2471 op1a = copy_addr_to_reg (XEXP (op1, 0));
2472 op2a = copy_addr_to_reg (XEXP (op2, 0));
2473 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2474 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2476 /* Make sure not to fold the sincos call again. */
2477 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2478 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2479 call, 3, arg, top1, top2));
2481 else
2483 tree call, fn = NULL_TREE, narg;
2484 tree ctype = build_complex_type (type);
2486 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2487 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2488 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2489 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2490 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2491 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2492 else
2493 gcc_unreachable ();
2495 /* If we don't have a decl for cexp create one. This is the
2496 friendliest fallback if the user calls __builtin_cexpi
2497 on a target without full C99 function support. */
2498 if (fn == NULL_TREE)
2500 tree fntype;
2501 const char *name = NULL;
2503 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2504 name = "cexpf";
2505 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2506 name = "cexp";
2507 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2508 name = "cexpl";
2510 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2511 fn = build_fn_decl (name, fntype);
2514 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2515 build_real (type, dconst0), arg);
2517 /* Make sure not to fold the cexp call again. */
2518 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2519 return expand_expr (build_call_nary (ctype, call, 1, narg),
2520 target, VOIDmode, EXPAND_NORMAL);
2523 /* Now build the proper return type. */
2524 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2525 make_tree (TREE_TYPE (arg), op2),
2526 make_tree (TREE_TYPE (arg), op1)),
2527 target, VOIDmode, EXPAND_NORMAL);
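/* Editorial note: the expansion relies on the identity
   cexpi (x) = cos (x) + i * sin (x), i.e. cexp (i*x). In the sincos
   paths above, the COMPLEX_EXPR just built places the cosine (op2) in
   the real part and the sine (op1) in the imaginary part, while the
   cexp fallback computes cexp (0 + i*x) directly.  */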
2530 /* Conveniently construct a function call expression. FNDECL names the
2531 function to be called, N is the number of arguments, and the "..."
2532 parameters are the argument expressions. Unlike build_call_expr,
2533 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2535 static tree
2536 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2538 va_list ap;
2539 tree fntype = TREE_TYPE (fndecl);
2540 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2542 va_start (ap, n);
2543 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2544 va_end (ap);
2545 SET_EXPR_LOCATION (fn, loc);
2546 return fn;
2549 /* Expand a call to one of the builtin rounding functions gcc defines
2550 as an extension (lfloor and lceil). As these are gcc extensions we
2551 do not need to worry about setting errno to EDOM.
2552 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2553 EXP is the expression that is a call to the builtin function;
2554 if convenient, the result should be placed in TARGET. */
2556 static rtx
2557 expand_builtin_int_roundingfn (tree exp, rtx target)
2559 convert_optab builtin_optab;
2560 rtx op0, tmp;
2561 rtx_insn *insns;
2562 tree fndecl = get_callee_fndecl (exp);
2563 enum built_in_function fallback_fn;
2564 tree fallback_fndecl;
2565 machine_mode mode;
2566 tree arg;
2568 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2569 gcc_unreachable ();
2571 arg = CALL_EXPR_ARG (exp, 0);
2573 switch (DECL_FUNCTION_CODE (fndecl))
2575 CASE_FLT_FN (BUILT_IN_ICEIL):
2576 CASE_FLT_FN (BUILT_IN_LCEIL):
2577 CASE_FLT_FN (BUILT_IN_LLCEIL):
2578 builtin_optab = lceil_optab;
2579 fallback_fn = BUILT_IN_CEIL;
2580 break;
2582 CASE_FLT_FN (BUILT_IN_IFLOOR):
2583 CASE_FLT_FN (BUILT_IN_LFLOOR):
2584 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2585 builtin_optab = lfloor_optab;
2586 fallback_fn = BUILT_IN_FLOOR;
2587 break;
2589 default:
2590 gcc_unreachable ();
2593 /* Make a suitable register to place result in. */
2594 mode = TYPE_MODE (TREE_TYPE (exp));
2596 target = gen_reg_rtx (mode);
2598 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2599 need to expand the argument again. This way, we will not perform
2600 side-effects more than once. */
2601 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2603 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2605 start_sequence ();
2607 /* Compute into TARGET. */
2608 if (expand_sfix_optab (target, op0, builtin_optab))
2610 /* Output the entire sequence. */
2611 insns = get_insns ();
2612 end_sequence ();
2613 emit_insn (insns);
2614 return target;
2617 /* If we were unable to expand via the builtin, stop the sequence
2618 (without outputting the insns). */
2619 end_sequence ();
2621 /* Fall back to floating point rounding optab. */
2622 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2624 /* For non-C99 targets we may end up without a fallback fndecl here
2625 if the user called __builtin_lfloor directly. In this case emit
2626 a call to the floor/ceil variants nevertheless. This should result
2627 in the best user experience on targets without full C99 support. */
2628 if (fallback_fndecl == NULL_TREE)
2630 tree fntype;
2631 const char *name = NULL;
2633 switch (DECL_FUNCTION_CODE (fndecl))
2635 case BUILT_IN_ICEIL:
2636 case BUILT_IN_LCEIL:
2637 case BUILT_IN_LLCEIL:
2638 name = "ceil";
2639 break;
2640 case BUILT_IN_ICEILF:
2641 case BUILT_IN_LCEILF:
2642 case BUILT_IN_LLCEILF:
2643 name = "ceilf";
2644 break;
2645 case BUILT_IN_ICEILL:
2646 case BUILT_IN_LCEILL:
2647 case BUILT_IN_LLCEILL:
2648 name = "ceill";
2649 break;
2650 case BUILT_IN_IFLOOR:
2651 case BUILT_IN_LFLOOR:
2652 case BUILT_IN_LLFLOOR:
2653 name = "floor";
2654 break;
2655 case BUILT_IN_IFLOORF:
2656 case BUILT_IN_LFLOORF:
2657 case BUILT_IN_LLFLOORF:
2658 name = "floorf";
2659 break;
2660 case BUILT_IN_IFLOORL:
2661 case BUILT_IN_LFLOORL:
2662 case BUILT_IN_LLFLOORL:
2663 name = "floorl";
2664 break;
2665 default:
2666 gcc_unreachable ();
2669 fntype = build_function_type_list (TREE_TYPE (arg),
2670 TREE_TYPE (arg), NULL_TREE);
2671 fallback_fndecl = build_fn_decl (name, fntype);
2674 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2676 tmp = expand_normal (exp);
2677 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2679 /* Truncate the result of floating point optab to integer
2680 via expand_fix (). */
2681 target = gen_reg_rtx (mode);
2682 expand_fix (target, tmp, 0);
2684 return target;
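/* Editorial example: on a target without lfloor_optab, a call
     long l = __builtin_lfloor (x);
   is lowered by the fallback path above to roughly
     l = (long) floor (x);
   i.e. a call to floor followed by an expand_fix truncation to the
   integer mode of the result.  */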
2687 /* Expand a call to one of the builtin math functions doing integer
2688 conversion (the lrint and lround families).
2689 Return 0 if a normal call should be emitted rather than expanding the
2690 function in-line. EXP is the expression that is a call to the builtin
2691 function; if convenient, the result should be placed in TARGET. */
2693 static rtx
2694 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2696 convert_optab builtin_optab;
2697 rtx op0;
2698 rtx_insn *insns;
2699 tree fndecl = get_callee_fndecl (exp);
2700 tree arg;
2701 machine_mode mode;
2702 enum built_in_function fallback_fn = BUILT_IN_NONE;
2704 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2705 gcc_unreachable ();
2707 arg = CALL_EXPR_ARG (exp, 0);
2709 switch (DECL_FUNCTION_CODE (fndecl))
2711 CASE_FLT_FN (BUILT_IN_IRINT):
2712 fallback_fn = BUILT_IN_LRINT;
2713 gcc_fallthrough ();
2714 CASE_FLT_FN (BUILT_IN_LRINT):
2715 CASE_FLT_FN (BUILT_IN_LLRINT):
2716 builtin_optab = lrint_optab;
2717 break;
2719 CASE_FLT_FN (BUILT_IN_IROUND):
2720 fallback_fn = BUILT_IN_LROUND;
2721 gcc_fallthrough ();
2722 CASE_FLT_FN (BUILT_IN_LROUND):
2723 CASE_FLT_FN (BUILT_IN_LLROUND):
2724 builtin_optab = lround_optab;
2725 break;
2727 default:
2728 gcc_unreachable ();
2731 /* There's no easy way to detect the case we need to set EDOM. */
2732 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2733 return NULL_RTX;
2735 /* Make a suitable register to place result in. */
2736 mode = TYPE_MODE (TREE_TYPE (exp));
2738 /* Expand inline only when errno does not need to be set. */
2739 if (!flag_errno_math)
2741 rtx result = gen_reg_rtx (mode);
2743 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2744 need to expand the argument again. This way, we will not perform
2745 side-effects more than once. */
2746 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2748 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2750 start_sequence ();
2752 if (expand_sfix_optab (result, op0, builtin_optab))
2754 /* Output the entire sequence. */
2755 insns = get_insns ();
2756 end_sequence ();
2757 emit_insn (insns);
2758 return result;
2761 /* If we were unable to expand via the builtin, stop the sequence
2762 (without outputting the insns) and call the library function
2763 with the stabilized argument list. */
2764 end_sequence ();
2767 if (fallback_fn != BUILT_IN_NONE)
2769 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2770 targets, (int) round (x) should never be transformed into
2771 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2772 a call to lround in the hope that the target provides at least some
2773 C99 functions. This should result in the best user experience for
2774 not full C99 targets. */
2775 tree fallback_fndecl = mathfn_built_in_1
2776 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2778 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2779 fallback_fndecl, 1, arg);
2781 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2782 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2783 return convert_to_mode (mode, target, 0);
2786 return expand_call (exp, target, target == const0_rtx);
2789 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2790 a normal call should be emitted rather than expanding the function
2791 in-line. EXP is the expression that is a call to the builtin
2792 function; if convenient, the result should be placed in TARGET. */
2794 static rtx
2795 expand_builtin_powi (tree exp, rtx target)
2797 tree arg0, arg1;
2798 rtx op0, op1;
2799 machine_mode mode;
2800 machine_mode mode2;
2802 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2803 return NULL_RTX;
2805 arg0 = CALL_EXPR_ARG (exp, 0);
2806 arg1 = CALL_EXPR_ARG (exp, 1);
2807 mode = TYPE_MODE (TREE_TYPE (exp));
2809 /* Emit a libcall to libgcc. */
2811 /* Mode of the 2nd argument must match that of an int. */
2812 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2814 if (target == NULL_RTX)
2815 target = gen_reg_rtx (mode);
2817 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2818 if (GET_MODE (op0) != mode)
2819 op0 = convert_to_mode (mode, op0, 0);
2820 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2821 if (GET_MODE (op1) != mode2)
2822 op1 = convert_to_mode (mode2, op1, 0);
2824 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2825 target, LCT_CONST, mode,
2826 op0, mode, op1, mode2);
2828 return target;
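/* Editorial example: __builtin_powi (x, n) with double X becomes a
   call to the powi libgcc routine for DFmode (conventionally named
   __powidf2); the exact libfunc comes from optab_libfunc (powi_optab,
   mode) and the name is given here only as an illustration.  */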
2831 /* Expand expression EXP which is a call to the strlen builtin. Return
2832 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2833 try to get the result in TARGET, if convenient. */
2835 static rtx
2836 expand_builtin_strlen (tree exp, rtx target,
2837 machine_mode target_mode)
2839 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2840 return NULL_RTX;
2842 struct expand_operand ops[4];
2843 rtx pat;
2844 tree len;
2845 tree src = CALL_EXPR_ARG (exp, 0);
2846 rtx src_reg;
2847 rtx_insn *before_strlen;
2848 machine_mode insn_mode;
2849 enum insn_code icode = CODE_FOR_nothing;
2850 unsigned int align;
2852 /* If the length can be computed at compile-time, return it. */
2853 len = c_strlen (src, 0);
2854 if (len)
2855 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2857 /* If the length can be computed at compile-time and is a constant
2858 integer, but there are side-effects in src, evaluate
2859 src for side-effects, then return len.
2860 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2861 can be optimized into: i++; x = 3; */
2862 len = c_strlen (src, 1);
2863 if (len && TREE_CODE (len) == INTEGER_CST)
2865 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2866 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2869 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2871 /* If SRC is not a pointer type, don't do this operation inline. */
2872 if (align == 0)
2873 return NULL_RTX;
2875 /* Bail out if we can't compute strlen in the right mode. */
2876 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2878 icode = optab_handler (strlen_optab, insn_mode);
2879 if (icode != CODE_FOR_nothing)
2880 break;
2882 if (insn_mode == VOIDmode)
2883 return NULL_RTX;
2885 /* Make a place to hold the source address. We will not expand
2886 the actual source until we are sure that the expansion will
2887 not fail -- there are trees that cannot be expanded twice. */
2888 src_reg = gen_reg_rtx (Pmode);
2890 /* Mark the beginning of the strlen sequence so we can emit the
2891 source operand later. */
2892 before_strlen = get_last_insn ();
2894 create_output_operand (&ops[0], target, insn_mode);
2895 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2896 create_integer_operand (&ops[2], 0);
2897 create_integer_operand (&ops[3], align);
2898 if (!maybe_expand_insn (icode, 4, ops))
2899 return NULL_RTX;
2901 /* Check to see if the argument was declared attribute nonstring
2902 and if so, issue a warning since at this point it's not known
2903 to be nul-terminated. */
2904 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2906 /* Now that we are assured of success, expand the source. */
2907 start_sequence ();
2908 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2909 if (pat != src_reg)
2911 #ifdef POINTERS_EXTEND_UNSIGNED
2912 if (GET_MODE (pat) != Pmode)
2913 pat = convert_to_mode (Pmode, pat,
2914 POINTERS_EXTEND_UNSIGNED);
2915 #endif
2916 emit_move_insn (src_reg, pat);
2918 pat = get_insns ();
2919 end_sequence ();
2921 if (before_strlen)
2922 emit_insn_after (pat, before_strlen);
2923 else
2924 emit_insn_before (pat, get_insns ());
2926 /* Return the value in the proper mode for this function. */
2927 if (GET_MODE (ops[0].value) == target_mode)
2928 target = ops[0].value;
2929 else if (target != 0)
2930 convert_move (target, ops[0].value, 0);
2931 else
2932 target = convert_to_mode (target_mode, ops[0].value, 0);
2934 return target;
2937 /* Expand call EXP to the strnlen built-in, returning the result
2938 in TARGET if convenient; return NULL_RTX on failure. */
2940 static rtx
2941 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
2943 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2944 return NULL_RTX;
2946 tree src = CALL_EXPR_ARG (exp, 0);
2947 tree bound = CALL_EXPR_ARG (exp, 1);
2949 if (!bound)
2950 return NULL_RTX;
2952 location_t loc = UNKNOWN_LOCATION;
2953 if (EXPR_HAS_LOCATION (exp))
2954 loc = EXPR_LOCATION (exp);
2956 tree maxobjsize = max_object_size ();
2957 tree func = get_callee_fndecl (exp);
2959 tree len = c_strlen (src, 0);
2961 if (TREE_CODE (bound) == INTEGER_CST)
2963 if (!TREE_NO_WARNING (exp)
2964 && tree_int_cst_lt (maxobjsize, bound)
2965 && warning_at (loc, OPT_Wstringop_overflow_,
2966 "%K%qD specified bound %E "
2967 "exceeds maximum object size %E",
2968 exp, func, bound, maxobjsize))
2969 TREE_NO_WARNING (exp) = true;
2971 if (!len || TREE_CODE (len) != INTEGER_CST)
2972 return NULL_RTX;
2974 len = fold_convert_loc (loc, size_type_node, len);
2975 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
2976 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2979 if (TREE_CODE (bound) != SSA_NAME)
2980 return NULL_RTX;
2982 wide_int min, max;
2983 enum value_range_type rng = get_range_info (bound, &min, &max);
2984 if (rng != VR_RANGE)
2985 return NULL_RTX;
2987 if (!TREE_NO_WARNING (exp)
2988 && wi::ltu_p (wi::to_wide (maxobjsize), min)
2989 && warning_at (loc, OPT_Wstringop_overflow_,
2990 "%K%qD specified bound [%wu, %wu] "
2991 "exceeds maximum object size %E",
2992 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
2993 TREE_NO_WARNING (exp) = true;
2995 if (!len || TREE_CODE (len) != INTEGER_CST)
2996 return NULL_RTX;
2998 if (wi::gtu_p (min, wi::to_wide (len)))
2999 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3001 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3002 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
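/* Editorial example: given
     char s[] = "abc";
     size_t n = __builtin_strnlen (s, 5);
   c_strlen computes the constant 3, the constant bound 5 does not
   exceed the maximum object size, and the call expands to
   MIN (3, 5), i.e. the constant 3.  */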
3005 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3006 bytes from constant string DATA + OFFSET and return it as target
3007 constant. */
3009 static rtx
3010 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3011 scalar_int_mode mode)
3013 const char *str = (const char *) data;
3015 gcc_assert (offset >= 0
3016 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3017 <= strlen (str) + 1));
3019 return c_readstr (str + offset, mode);
3022 /* LEN specifies the length of the block in a memcpy/memset operation.
3023 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3024 In some cases we can make a very likely guess about the maximum size,
3025 which we then store into PROBABLE_MAX_SIZE. */
3027 static void
3028 determine_block_size (tree len, rtx len_rtx,
3029 unsigned HOST_WIDE_INT *min_size,
3030 unsigned HOST_WIDE_INT *max_size,
3031 unsigned HOST_WIDE_INT *probable_max_size)
3033 if (CONST_INT_P (len_rtx))
3035 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3036 return;
3038 else
3040 wide_int min, max;
3041 enum value_range_type range_type = VR_UNDEFINED;
3043 /* Determine bounds from the type. */
3044 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3045 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3046 else
3047 *min_size = 0;
3048 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3049 *probable_max_size = *max_size
3050 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3051 else
3052 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3054 if (TREE_CODE (len) == SSA_NAME)
3055 range_type = get_range_info (len, &min, &max);
3056 if (range_type == VR_RANGE)
3058 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3059 *min_size = min.to_uhwi ();
3060 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3061 *probable_max_size = *max_size = max.to_uhwi ();
3063 else if (range_type == VR_ANTI_RANGE)
3065 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
3066 if (min == 0)
3068 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3069 *min_size = max.to_uhwi () + 1;
3071 /* Code like
3073 int n;
3074 if (n < 100)
3075 memcpy (a, b, n)
3077 produces an anti-range allowing negative values of N. We can
3078 still use that information to guess that N is not negative. */
3080 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3081 *probable_max_size = min.to_uhwi () - 1;
3084 gcc_checking_assert (*max_size <=
3085 (unsigned HOST_WIDE_INT)
3086 GET_MODE_MASK (GET_MODE (len_rtx)));
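/* Editorial example: when LEN_RTX is the constant 16, all three outputs
   are 16; when LEN is an SSA name with a VRP range of [4, 32], MIN_SIZE
   becomes 4 and both MAX_SIZE and PROBABLE_MAX_SIZE become 32.  */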
3089 /* Try to verify that the sizes and lengths of the arguments to a string
3090 manipulation function given by EXP are within valid bounds and that
3091 the operation does not lead to buffer overflow or read past the end.
3092 Arguments other than EXP may be null. When non-null, the arguments
3093 have the following meaning:
3094 DST is the destination of a copy call or NULL otherwise.
3095 SRC is the source of a copy call or NULL otherwise.
3096 DSTWRITE is the number of bytes written into the destination obtained
3097 from the user-supplied size argument to the function (such as in
3098 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3099 MAXREAD is the user-supplied bound on the length of the source sequence
3100 (such as in strncat(d, s, N)). It specifies the upper limit on the number
3101 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3102 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3103 expression EXP is a string function call (as opposed to a memory call
3104 like memcpy). As an exception, SRCSTR can also be an integer denoting
3105 the precomputed size of the source string or object (for functions like
3106 memcpy).
3107 DSTSIZE is the size of the destination object specified by the last
3108 argument to the _chk builtins, typically resulting from the expansion
3109 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3110 DSTSIZE)).
3112 When DSTWRITE is null, MAXREAD is checked to verify that it doesn't exceed
3113 SIZE_MAX.
3115 If the call is successfully verified as safe return true, otherwise
3116 return false. */
3118 static bool
3119 check_access (tree exp, tree, tree, tree dstwrite,
3120 tree maxread, tree srcstr, tree dstsize)
3122 int opt = OPT_Wstringop_overflow_;
3124 /* The size of the largest object is half the address space, or
3125 PTRDIFF_MAX. (This is way too permissive.) */
3126 tree maxobjsize = max_object_size ();
3128 /* Either the length of the source string for string functions or
3129 the size of the source object for raw memory functions. */
3130 tree slen = NULL_TREE;
3132 tree range[2] = { NULL_TREE, NULL_TREE };
3134 /* Set to true when the exact number of bytes written by a string
3135 function like strcpy is not known and the only thing that is
3136 known is that it must be at least one (for the terminating nul). */
3137 bool at_least_one = false;
3138 if (srcstr)
3140 /* SRCSTR is normally a pointer to a string but as a special case
3141 it can be an integer denoting the length of a string. */
3142 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3144 /* Try to determine the range of lengths the source string
3145 refers to. If it can be determined and is less than
3146 the upper bound given by MAXREAD add one to it for
3147 the terminating nul. Otherwise, set it to one for
3148 the same reason, or to MAXREAD as appropriate. */
3149 get_range_strlen (srcstr, range);
3150 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3152 if (maxread && tree_int_cst_le (maxread, range[0]))
3153 range[0] = range[1] = maxread;
3154 else
3155 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3156 range[0], size_one_node);
3158 if (maxread && tree_int_cst_le (maxread, range[1]))
3159 range[1] = maxread;
3160 else if (!integer_all_onesp (range[1]))
3161 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3162 range[1], size_one_node);
3164 slen = range[0];
3166 else
3168 at_least_one = true;
3169 slen = size_one_node;
3172 else
3173 slen = srcstr;
3176 if (!dstwrite && !maxread)
3178 /* When the only available piece of data is the object size
3179 there is nothing to do. */
3180 if (!slen)
3181 return true;
3183 /* Otherwise, when the length of the source sequence is known
3184 (as with strlen), set DSTWRITE to it. */
3185 if (!range[0])
3186 dstwrite = slen;
3189 if (!dstsize)
3190 dstsize = maxobjsize;
3192 if (dstwrite)
3193 get_size_range (dstwrite, range);
3195 /* This can happen at -O0. */
3196 if (range[0] && TREE_CODE (range[0]) != INTEGER_CST)
3197 return false;
3199 tree func = get_callee_fndecl (exp);
3201 /* First check the number of bytes to be written against the maximum
3202 object size. */
3203 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3205 if (TREE_NO_WARNING (exp))
3206 return false;
3208 location_t loc = tree_nonartificial_location (exp);
3209 loc = expansion_point_location_if_in_system_header (loc);
3211 bool warned;
3212 if (range[0] == range[1])
3213 warned = warning_at (loc, opt,
3214 "%K%qD specified size %E "
3215 "exceeds maximum object size %E",
3216 exp, func, range[0], maxobjsize);
3217 else
3218 warned = warning_at (loc, opt,
3219 "%K%qD specified size between %E and %E "
3220 "exceeds maximum object size %E",
3221 exp, func,
3222 range[0], range[1], maxobjsize);
3223 if (warned)
3224 TREE_NO_WARNING (exp) = true;
3226 return false;
3229 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3230 constant, and in range of unsigned HOST_WIDE_INT. */
3231 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3233 /* Next check the number of bytes to be written against the destination
3234 object size. */
3235 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3237 if (range[0]
3238 && ((tree_fits_uhwi_p (dstsize)
3239 && tree_int_cst_lt (dstsize, range[0]))
3240 || (tree_fits_uhwi_p (dstwrite)
3241 && tree_int_cst_lt (dstwrite, range[0]))))
3243 if (TREE_NO_WARNING (exp))
3244 return false;
3246 location_t loc = tree_nonartificial_location (exp);
3247 loc = expansion_point_location_if_in_system_header (loc);
3249 if (dstwrite == slen && at_least_one)
3251 /* This is a call to strcpy with a destination of 0 size
3252 and a source of unknown length. The call will write
3253 at least one byte past the end of the destination. */
3254 warning_at (loc, opt,
3255 "%K%qD writing %E or more bytes into a region "
3256 "of size %E overflows the destination",
3257 exp, func, range[0], dstsize);
3259 else if (tree_int_cst_equal (range[0], range[1]))
3260 warning_n (loc, opt, tree_to_uhwi (range[0]),
3261 "%K%qD writing %E byte into a region "
3262 "of size %E overflows the destination",
3263 "%K%qD writing %E bytes into a region "
3264 "of size %E overflows the destination",
3265 exp, func, range[0], dstsize);
3266 else if (tree_int_cst_sign_bit (range[1]))
3268 /* Avoid printing the upper bound if it's invalid. */
3269 warning_at (loc, opt,
3270 "%K%qD writing %E or more bytes into a region "
3271 "of size %E overflows the destination",
3272 exp, func, range[0], dstsize);
3274 else
3275 warning_at (loc, opt,
3276 "%K%qD writing between %E and %E bytes into "
3277 "a region of size %E overflows the destination",
3278 exp, func, range[0], range[1],
3279 dstsize);
3281 /* Return false when an overflow has been detected. */
3282 return false;
3286 /* Check the maximum length of the source sequence against the size
3287 of the destination object if known, or against the maximum size
3288 of an object. */
3289 if (maxread)
3291 get_size_range (maxread, range);
3293 /* Use the lower end for MAXREAD from now on. */
3294 if (range[0])
3295 maxread = range[0];
3297 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3299 location_t loc = tree_nonartificial_location (exp);
3300 loc = expansion_point_location_if_in_system_header (loc);
3302 if (tree_int_cst_lt (maxobjsize, range[0]))
3304 if (TREE_NO_WARNING (exp))
3305 return false;
3307 /* Warn about crazy big sizes first since that's more
3308 likely to be meaningful than saying that the bound
3309 is greater than the object size if both are big. */
3310 if (range[0] == range[1])
3311 warning_at (loc, opt,
3312 "%K%qD specified bound %E "
3313 "exceeds maximum object size %E",
3314 exp, func,
3315 range[0], maxobjsize);
3316 else
3317 warning_at (loc, opt,
3318 "%K%qD specified bound between %E and %E "
3319 "exceeds maximum object size %E",
3320 exp, func,
3321 range[0], range[1], maxobjsize);
3323 return false;
3326 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3328 if (TREE_NO_WARNING (exp))
3329 return false;
3331 if (tree_int_cst_equal (range[0], range[1]))
3332 warning_at (loc, opt,
3333 "%K%qD specified bound %E "
3334 "exceeds destination size %E",
3335 exp, func,
3336 range[0], dstsize);
3337 else
3338 warning_at (loc, opt,
3339 "%K%qD specified bound between %E and %E "
3340 "exceeds destination size %E",
3341 exp, func,
3342 range[0], range[1], dstsize);
3343 return false;
3348 /* Check for reading past the end of SRC. */
3349 if (slen
3350 && slen == srcstr
3351 && dstwrite && range[0]
3352 && tree_int_cst_lt (slen, range[0]))
3354 if (TREE_NO_WARNING (exp))
3355 return false;
3357 location_t loc = tree_nonartificial_location (exp);
3359 if (tree_int_cst_equal (range[0], range[1]))
3360 warning_n (loc, opt, tree_to_uhwi (range[0]),
3361 "%K%qD reading %E byte from a region of size %E",
3362 "%K%qD reading %E bytes from a region of size %E",
3363 exp, func, range[0], slen);
3364 else if (tree_int_cst_sign_bit (range[1]))
3366 /* Avoid printing the upper bound if it's invalid. */
3367 warning_at (loc, opt,
3368 "%K%qD reading %E or more bytes from a region "
3369 "of size %E",
3370 exp, func, range[0], slen);
3372 else
3373 warning_at (loc, opt,
3374 "%K%qD reading between %E and %E bytes from a region "
3375 "of size %E",
3376 exp, func, range[0], range[1], slen);
3377 return false;
3380 return true;
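/* Editorial example of a diagnosed call: for
     char d[4];
     strcpy (d, "abcdef");
   the source length range is [7, 7] including the terminating nul,
   DSTSIZE is 4, and check_access returns false after emitting a
   -Wstringop-overflow warning about writing 7 bytes into a region of
   size 4.  */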
3383 /* Helper to compute the size of the object referenced by the DEST
3384 expression which must have pointer type, using Object Size type
3385 OSTYPE (only the least significant 2 bits are used). Return
3386 an estimate of the size of the object if successful or NULL when
3387 the size cannot be determined. When the referenced object involves
3388 a non-constant offset in some range the returned value represents
3389 the largest size given the smallest non-negative offset in the
3390 range. The function is intended for diagnostics and should not
3391 be used to influence code generation or optimization. */
3393 tree
3394 compute_objsize (tree dest, int ostype)
3396 unsigned HOST_WIDE_INT size;
3398 /* Only the two least significant bits are meaningful. */
3399 ostype &= 3;
3401 if (compute_builtin_object_size (dest, ostype, &size))
3402 return build_int_cst (sizetype, size);
3404 if (TREE_CODE (dest) == SSA_NAME)
3406 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3407 if (!is_gimple_assign (stmt))
3408 return NULL_TREE;
3410 dest = gimple_assign_rhs1 (stmt);
3412 tree_code code = gimple_assign_rhs_code (stmt);
3413 if (code == POINTER_PLUS_EXPR)
3415 /* compute_builtin_object_size fails for addresses with
3416 non-constant offsets. Try to determine the range of
3417 such an offset here and use it to adjust the constant
3418 size. */
3419 tree off = gimple_assign_rhs2 (stmt);
3420 if (TREE_CODE (off) == INTEGER_CST)
3422 if (tree size = compute_objsize (dest, ostype))
3424 wide_int wioff = wi::to_wide (off);
3425 wide_int wisiz = wi::to_wide (size);
3427 /* Ignore negative offsets for now. For others,
3428 use the lower bound as the most optimistic
3429 estimate of the (remaining) size. */
3430 if (wi::sign_mask (wioff))
3432 else if (wi::ltu_p (wioff, wisiz))
3433 return wide_int_to_tree (TREE_TYPE (size),
3434 wi::sub (wisiz, wioff));
3435 else
3436 return size_zero_node;
3439 else if (TREE_CODE (off) == SSA_NAME
3440 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3442 wide_int min, max;
3443 enum value_range_type rng = get_range_info (off, &min, &max);
3445 if (rng == VR_RANGE)
3447 if (tree size = compute_objsize (dest, ostype))
3449 wide_int wisiz = wi::to_wide (size);
3451 /* Ignore negative offsets for now. For others,
3452 use the lower bound as the most optimistic
3453 estimate of the (remaining) size. */
3454 if (wi::sign_mask (min))
3456 else if (wi::ltu_p (min, wisiz))
3457 return wide_int_to_tree (TREE_TYPE (size),
3458 wi::sub (wisiz, min));
3459 else
3460 return size_zero_node;
3465 else if (code != ADDR_EXPR)
3466 return NULL_TREE;
3469 /* Unless computing the largest size (for memcpy and other raw memory
3470 functions), try to determine the size of the object from its type. */
3471 if (!ostype)
3472 return NULL_TREE;
3474 if (TREE_CODE (dest) != ADDR_EXPR)
3475 return NULL_TREE;
3477 tree type = TREE_TYPE (dest);
3478 if (TREE_CODE (type) == POINTER_TYPE)
3479 type = TREE_TYPE (type);
3481 type = TYPE_MAIN_VARIANT (type);
3483 if (TREE_CODE (type) == ARRAY_TYPE
3484 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3486 /* Return the constant size unless it's zero (that's a zero-length
3487 array likely at the end of a struct). */
3488 tree size = TYPE_SIZE_UNIT (type);
3489 if (size && TREE_CODE (size) == INTEGER_CST
3490 && !integer_zerop (size))
3491 return size;
3494 return NULL_TREE;
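/* Illustrative sketch (not part of the original source): with the
   range-based adjustment above, given

     char buf[8];
     char *p = buf + i;   // value range of i known to be [2, 6]

   compute_objsize (p, 1) subtracts the range minimum from the object
   size and reports 8 - 2 = 6 bytes remaining, the most optimistic
   estimate of the space left past the offset.  */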
3497 /* Helper to determine and check the sizes of the source and the destination
3498 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3499 call expression, DEST is the destination argument, SRC is the source
3500 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3501 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3502 (no overflow or invalid sizes), false otherwise. */
3504 static bool
3505 check_memop_access (tree exp, tree dest, tree src, tree size)
3507 /* For functions like memset and memcpy that operate on raw memory
3508 try to determine the size of the largest source and destination
3509 object using type-0 Object Size regardless of the object size
3510 type specified by the option. */
3511 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3512 tree dstsize = compute_objsize (dest, 0);
3514 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3515 srcsize, dstsize);
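/* For example (hypothetical user code, shown only to illustrate the
   kind of call the type-0 check above diagnoses):

     char d[4];
     memcpy (d, s, 8);   // -Wstringop-overflow: writing 8 bytes
                         // into a region of size 4
*/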
3518 /* Validate memchr arguments without performing any expansion.
3519 Return NULL_RTX. */
3521 static rtx
3522 expand_builtin_memchr (tree exp, rtx)
3524 if (!validate_arglist (exp,
3525 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3526 return NULL_RTX;
3528 tree arg1 = CALL_EXPR_ARG (exp, 0);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3531 /* Diagnose calls where the specified length exceeds the size
3532 of the object. */
3533 if (warn_stringop_overflow)
3535 tree size = compute_objsize (arg1, 0);
3536 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3537 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3540 return NULL_RTX;
3543 /* Expand a call EXP to the memcpy builtin.
3544 Return NULL_RTX if we failed, the caller should emit a normal call,
3545 otherwise try to get the result in TARGET, if convenient (and in
3546 mode MODE if that's convenient). */
3548 static rtx
3549 expand_builtin_memcpy (tree exp, rtx target)
3551 if (!validate_arglist (exp,
3552 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3553 return NULL_RTX;
3555 tree dest = CALL_EXPR_ARG (exp, 0);
3556 tree src = CALL_EXPR_ARG (exp, 1);
3557 tree len = CALL_EXPR_ARG (exp, 2);
3559 check_memop_access (exp, dest, src, len);
3561 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3562 /*endp=*/ 0);
3565 /* Check a call EXP to the memmove built-in for validity.
3566 Return NULL_RTX on both success and failure. */
3568 static rtx
3569 expand_builtin_memmove (tree exp, rtx)
3571 if (!validate_arglist (exp,
3572 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3575 tree dest = CALL_EXPR_ARG (exp, 0);
3576 tree src = CALL_EXPR_ARG (exp, 1);
3577 tree len = CALL_EXPR_ARG (exp, 2);
3579 check_memop_access (exp, dest, src, len);
3581 return NULL_RTX;
3584 /* Expand a call EXP to the mempcpy builtin.
3585 Return NULL_RTX if we failed; the caller should emit a normal call,
3586 otherwise try to get the result in TARGET, if convenient (and in
3587 mode MODE if that's convenient). If ENDP is 0 return the
3588 destination pointer, if ENDP is 1 return the end pointer ala
3589 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3590 stpcpy. */
3592 static rtx
3593 expand_builtin_mempcpy (tree exp, rtx target)
3595 if (!validate_arglist (exp,
3596 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3597 return NULL_RTX;
3599 tree dest = CALL_EXPR_ARG (exp, 0);
3600 tree src = CALL_EXPR_ARG (exp, 1);
3601 tree len = CALL_EXPR_ARG (exp, 2);
3603 /* Policy does not generally allow using compute_objsize (which
3604 is used internally by check_memop_access) to change code generation
3605 or drive optimization decisions.
3607 In this instance it is safe because the code we generate has
3608 the same semantics regardless of the return value of
3609 check_memop_access. Exactly the same amount of data is copied
3610 and the return value is exactly the same in both cases.
3612 Furthermore, check_memop_access always uses mode 0 for the call to
3613 compute_objsize, so the imprecise nature of compute_objsize is
3614 avoided. */
3616 /* Avoid expanding mempcpy into memcpy when the call is determined
3617 to overflow the buffer. This also prevents the same overflow
3618 from being diagnosed again when expanding memcpy. */
3619 if (!check_memop_access (exp, dest, src, len))
3620 return NULL_RTX;
3622 return expand_builtin_mempcpy_args (dest, src, len,
3623 target, exp, /*endp=*/ 1);
3626 /* Helper function to do the actual work of expanding the memory copy
3627 family of functions (memcpy, mempcpy, stpcpy). The expansion should
3628 copy LEN bytes of memory from SRC to DEST and assign to TARGET if convenient.
3629 If ENDP is 0 return the
3630 destination pointer, if ENDP is 1 return the end pointer ala
3631 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3632 stpcpy. */
3634 static rtx
3635 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3636 rtx target, tree exp, int endp)
3638 const char *src_str;
3639 unsigned int src_align = get_pointer_alignment (src);
3640 unsigned int dest_align = get_pointer_alignment (dest);
3641 rtx dest_mem, src_mem, dest_addr, len_rtx;
3642 HOST_WIDE_INT expected_size = -1;
3643 unsigned int expected_align = 0;
3644 unsigned HOST_WIDE_INT min_size;
3645 unsigned HOST_WIDE_INT max_size;
3646 unsigned HOST_WIDE_INT probable_max_size;
3648 /* If DEST is not a pointer type, call the normal function. */
3649 if (dest_align == 0)
3650 return NULL_RTX;
3652 /* If SRC is not a pointer type, don't do this
3653 operation in-line. */
3654 if (src_align == 0)
3655 return NULL_RTX;
3657 if (currently_expanding_gimple_stmt)
3658 stringop_block_profile (currently_expanding_gimple_stmt,
3659 &expected_align, &expected_size);
3661 if (expected_align < dest_align)
3662 expected_align = dest_align;
3663 dest_mem = get_memory_rtx (dest, len);
3664 set_mem_align (dest_mem, dest_align);
3665 len_rtx = expand_normal (len);
3666 determine_block_size (len, len_rtx, &min_size, &max_size,
3667 &probable_max_size);
3668 src_str = c_getstr (src);
3670 /* If SRC is a string constant and block move would be done
3671 by pieces, we can avoid loading the string from memory
3672 and need only store the computed constants. */
3673 if (src_str
3674 && CONST_INT_P (len_rtx)
3675 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3676 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3677 CONST_CAST (char *, src_str),
3678 dest_align, false))
3680 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3681 builtin_memcpy_read_str,
3682 CONST_CAST (char *, src_str),
3683 dest_align, false, endp);
3684 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3685 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3686 return dest_mem;
3689 src_mem = get_memory_rtx (src, len);
3690 set_mem_align (src_mem, src_align);
3692 /* Copy word part most expediently. */
3693 enum block_op_methods method = BLOCK_OP_NORMAL;
3694 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3695 method = BLOCK_OP_TAILCALL;
3696 if (endp == 1 && target != const0_rtx)
3697 method = BLOCK_OP_NO_LIBCALL_RET;
3698 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3699 expected_align, expected_size,
3700 min_size, max_size, probable_max_size);
3701 if (dest_addr == pc_rtx)
3702 return NULL_RTX;
3704 if (dest_addr == 0)
3706 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3707 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3710 if (endp && target != const0_rtx)
3712 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3713 /* stpcpy pointer to last byte. */
3714 if (endp == 2)
3715 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3718 return dest_addr;
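/* A quick reference for the ENDP convention implemented above, using
   assumed calls for illustration:

     char d[16];
     memcpy  (d, "abc", 4);   // ENDP == 0: value is d
     mempcpy (d, "abc", 4);   // ENDP == 1: value is d + 4
     stpcpy  (d, "abc");      // ENDP == 2: value is d + 3, the NUL
*/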
3721 static rtx
3722 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3723 rtx target, tree orig_exp, int endp)
3725 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3726 endp);
3729 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3730 we failed; the caller should emit a normal call, otherwise try to
3731 get the result in TARGET, if convenient. If ENDP is 0 return the
3732 destination pointer, if ENDP is 1 return the end pointer ala
3733 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3734 stpcpy. */
3736 static rtx
3737 expand_movstr (tree dest, tree src, rtx target, int endp)
3739 struct expand_operand ops[3];
3740 rtx dest_mem;
3741 rtx src_mem;
3743 if (!targetm.have_movstr ())
3744 return NULL_RTX;
3746 dest_mem = get_memory_rtx (dest, NULL);
3747 src_mem = get_memory_rtx (src, NULL);
3748 if (!endp)
3750 target = force_reg (Pmode, XEXP (dest_mem, 0));
3751 dest_mem = replace_equiv_address (dest_mem, target);
3754 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3755 create_fixed_operand (&ops[1], dest_mem);
3756 create_fixed_operand (&ops[2], src_mem);
3757 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3758 return NULL_RTX;
3760 if (endp && target != const0_rtx)
3762 target = ops[0].value;
3763 /* movstr is supposed to set end to the address of the NUL
3764 terminator. If the caller requested a mempcpy-like return value,
3765 adjust it. */
3766 if (endp == 1)
3768 rtx tem = plus_constant (GET_MODE (target),
3769 gen_lowpart (GET_MODE (target), target), 1);
3770 emit_move_insn (target, force_operand (tem, NULL_RTX));
3773 return target;
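/* Sketch of the adjustment above (illustrative): movstr leaves its
   output operand pointing at the copied NUL terminator, e.g. d + 3
   after copying "abc" to d. A caller requesting the mempcpy convention
   (ENDP == 1) needs the address one past the NUL, d + 4, hence the
   plus_constant adjustment by 1.  */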
3776 /* Do some very basic size validation of a call to the strcat builtin
3777 given by EXP. Return NULL_RTX to have the built-in expand to a call
3778 to the library function. */
3780 static rtx
3781 expand_builtin_strcat (tree exp, rtx)
3783 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3784 || !warn_stringop_overflow)
3785 return NULL_RTX;
3787 tree dest = CALL_EXPR_ARG (exp, 0);
3788 tree src = CALL_EXPR_ARG (exp, 1);
3790 /* There is no way here to determine the length of the string in
3791 the destination to which the SRC string is being appended so
3792 just diagnose cases when the source string is longer than
3793 the destination object. */
3795 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3797 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3798 destsize);
3800 return NULL_RTX;
3803 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3804 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3805 try to get the result in TARGET, if convenient (and in mode MODE if that's
3806 convenient). */
3808 static rtx
3809 expand_builtin_strcpy (tree exp, rtx target)
3811 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3812 return NULL_RTX;
3814 tree dest = CALL_EXPR_ARG (exp, 0);
3815 tree src = CALL_EXPR_ARG (exp, 1);
3817 if (warn_stringop_overflow)
3819 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3820 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3821 src, destsize);
3824 if (rtx ret = expand_builtin_strcpy_args (dest, src, target))
3826 /* Check to see if the argument was declared attribute nonstring
3827 and if so, issue a warning since at this point it's not known
3828 to be nul-terminated. */
3829 tree fndecl = get_callee_fndecl (exp);
3830 maybe_warn_nonstring_arg (fndecl, exp);
3831 return ret;
3834 return NULL_RTX;
3837 /* Helper function to do the actual work for expand_builtin_strcpy. The
3838 arguments to the builtin_strcpy call DEST and SRC are broken out
3839 so that this can also be called without constructing an actual CALL_EXPR.
3840 The other arguments and return value are the same as for
3841 expand_builtin_strcpy. */
3843 static rtx
3844 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3846 return expand_movstr (dest, src, target, /*endp=*/0);
3849 /* Expand a call EXP to the stpcpy builtin.
3850 Return NULL_RTX if we failed; the caller should emit a normal call,
3851 otherwise try to get the result in TARGET, if convenient (and in
3852 mode MODE if that's convenient). */
3854 static rtx
3855 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3857 tree dst, src;
3858 location_t loc = EXPR_LOCATION (exp);
3860 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3861 return NULL_RTX;
3863 dst = CALL_EXPR_ARG (exp, 0);
3864 src = CALL_EXPR_ARG (exp, 1);
3866 if (warn_stringop_overflow)
3868 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3869 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3870 src, destsize);
3873 /* If return value is ignored, transform stpcpy into strcpy. */
3874 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3876 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3877 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3878 return expand_expr (result, target, mode, EXPAND_NORMAL);
3880 else
3882 tree len, lenp1;
3883 rtx ret;
3885 /* Ensure we get an actual string whose length can be evaluated at
3886 compile-time, not an expression containing a string. This is
3887 because the latter will potentially produce pessimized code
3888 when used to produce the return value. */
3889 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3890 return expand_movstr (dst, src, target, /*endp=*/2);
3892 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3893 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3894 target, exp, /*endp=*/2);
3896 if (ret)
3897 return ret;
3899 if (TREE_CODE (len) == INTEGER_CST)
3901 rtx len_rtx = expand_normal (len);
3903 if (CONST_INT_P (len_rtx))
3905 ret = expand_builtin_strcpy_args (dst, src, target);
3907 if (ret)
3909 if (! target)
3911 if (mode != VOIDmode)
3912 target = gen_reg_rtx (mode);
3913 else
3914 target = gen_reg_rtx (GET_MODE (ret));
3916 if (GET_MODE (target) != GET_MODE (ret))
3917 ret = gen_lowpart (GET_MODE (target), ret);
3919 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3920 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3921 gcc_assert (ret);
3923 return target;
3928 return expand_movstr (dst, src, target, /*endp=*/2);
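/* Example of the constant-source transformation above (assumed user
   code): for

     stpcpy (d, "abc");

   c_strlen gives LEN == 3, so the call is expanded as a 4-byte
   (LEN + 1) mempcpy with ENDP == 2 and the value produced is
   d + 4 - 1 == d + 3, a pointer to the copied NUL.  */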
3932 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3933 arguments while being careful to avoid duplicate warnings (which could
3934 be issued if the expander were to expand the call, resulting in it
3935 being emitted in expand_call ()). */
3937 static rtx
3938 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3940 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3942 /* The call has been successfully expanded. Check for nonstring
3943 arguments and issue warnings as appropriate. */
3944 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3945 return ret;
3948 return NULL_RTX;
3951 /* Check a call EXP to the stpncpy built-in for validity.
3952 Return NULL_RTX on both success and failure. */
3954 static rtx
3955 expand_builtin_stpncpy (tree exp, rtx)
3957 if (!validate_arglist (exp,
3958 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3959 || !warn_stringop_overflow)
3960 return NULL_RTX;
3962 /* The source and destination of the call. */
3963 tree dest = CALL_EXPR_ARG (exp, 0);
3964 tree src = CALL_EXPR_ARG (exp, 1);
3966 /* The exact number of bytes to write (not the maximum). */
3967 tree len = CALL_EXPR_ARG (exp, 2);
3969 /* The size of the destination object. */
3970 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3972 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3974 return NULL_RTX;
3977 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3978 bytes from constant string DATA + OFFSET and return it as target
3979 constant. */
3981 static rtx
3982 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3983 scalar_int_mode mode)
3985 const char *str = (const char *) data;
3987 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3988 return const0_rtx;
3990 return c_readstr (str + offset, mode);
3993 /* Helper to check the sizes of sequences and the destination of calls
3994 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3995 success (no overflow or invalid sizes), false otherwise. */
3997 static bool
3998 check_strncat_sizes (tree exp, tree objsize)
4000 tree dest = CALL_EXPR_ARG (exp, 0);
4001 tree src = CALL_EXPR_ARG (exp, 1);
4002 tree maxread = CALL_EXPR_ARG (exp, 2);
4004 /* Try to determine the range of lengths that the source expression
4005 refers to. */
4006 tree lenrange[2];
4007 get_range_strlen (src, lenrange);
4009 /* Try to verify that the destination is big enough for the shortest
4010 string. */
4012 if (!objsize && warn_stringop_overflow)
4014 /* If it hasn't been provided by __strncat_chk, try to determine
4015 the size of the destination object into which the source is
4016 being copied. */
4017 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4020 /* Add one for the terminating nul. */
4021 tree srclen = (lenrange[0]
4022 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4023 size_one_node)
4024 : NULL_TREE);
4026 /* The strncat function copies at most MAXREAD bytes and always appends
4027 the terminating nul so the specified upper bound should never be equal
4028 to (or greater than) the size of the destination. */
4029 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4030 && tree_int_cst_equal (objsize, maxread))
4032 location_t loc = tree_nonartificial_location (exp);
4033 loc = expansion_point_location_if_in_system_header (loc);
4035 warning_at (loc, OPT_Wstringop_overflow_,
4036 "%K%qD specified bound %E equals destination size",
4037 exp, get_callee_fndecl (exp), maxread);
4039 return false;
4042 if (!srclen
4043 || (maxread && tree_fits_uhwi_p (maxread)
4044 && tree_fits_uhwi_p (srclen)
4045 && tree_int_cst_lt (maxread, srclen)))
4046 srclen = maxread;
4048 /* The number of bytes to write is LEN but check_access will also
4049 check SRCLEN if LEN's value isn't known. */
4050 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4051 objsize);
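/* The bound check above fires for a common anti-pattern (hypothetical
   user code):

     char d[8];
     strncat (d, s, sizeof d);   // bound equals destination size;
                                 // the terminating NUL can land
                                 // past the end of d
*/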
4054 /* Similar to expand_builtin_strcat, do some very basic size validation
4055 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
4056 the built-in expand to a call to the library function. */
4058 static rtx
4059 expand_builtin_strncat (tree exp, rtx)
4061 if (!validate_arglist (exp,
4062 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4063 || !warn_stringop_overflow)
4064 return NULL_RTX;
4066 tree dest = CALL_EXPR_ARG (exp, 0);
4067 tree src = CALL_EXPR_ARG (exp, 1);
4068 /* The upper bound on the number of bytes to write. */
4069 tree maxread = CALL_EXPR_ARG (exp, 2);
4070 /* The length of the source sequence. */
4071 tree slen = c_strlen (src, 1);
4073 /* Try to determine the range of lengths that the source expression
4074 refers to. */
4075 tree lenrange[2];
4076 if (slen)
4077 lenrange[0] = lenrange[1] = slen;
4078 else
4079 get_range_strlen (src, lenrange);
4081 /* Try to verify that the destination is big enough for the shortest
4082 string. First try to determine the size of the destination object
4083 into which the source is being copied. */
4084 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4086 /* Add one for the terminating nul. */
4087 tree srclen = (lenrange[0]
4088 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4089 size_one_node)
4090 : NULL_TREE);
4092 /* The strncat function copies at most MAXREAD bytes and always appends
4093 the terminating nul so the specified upper bound should never be equal
4094 to (or greater than) the size of the destination. */
4095 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4096 && tree_int_cst_equal (destsize, maxread))
4098 location_t loc = tree_nonartificial_location (exp);
4099 loc = expansion_point_location_if_in_system_header (loc);
4101 warning_at (loc, OPT_Wstringop_overflow_,
4102 "%K%qD specified bound %E equals destination size",
4103 exp, get_callee_fndecl (exp), maxread);
4105 return NULL_RTX;
4108 if (!srclen
4109 || (maxread && tree_fits_uhwi_p (maxread)
4110 && tree_fits_uhwi_p (srclen)
4111 && tree_int_cst_lt (maxread, srclen)))
4112 srclen = maxread;
4114 /* The number of bytes to write is SRCLEN. */
4115 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4117 return NULL_RTX;
4120 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4121 NULL_RTX if we failed; the caller should emit a normal call. */
4123 static rtx
4124 expand_builtin_strncpy (tree exp, rtx target)
4126 location_t loc = EXPR_LOCATION (exp);
4128 if (validate_arglist (exp,
4129 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4131 tree dest = CALL_EXPR_ARG (exp, 0);
4132 tree src = CALL_EXPR_ARG (exp, 1);
4133 /* The number of bytes to write (not the maximum). */
4134 tree len = CALL_EXPR_ARG (exp, 2);
4135 /* The length of the source sequence. */
4136 tree slen = c_strlen (src, 1);
4138 if (warn_stringop_overflow)
4140 tree destsize = compute_objsize (dest,
4141 warn_stringop_overflow - 1);
4143 /* The number of bytes to write is LEN but check_access will also
4144 check SLEN if LEN's value isn't known. */
4145 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4146 destsize);
4149 /* We must be passed constant len and src parameters. */
4150 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4151 return NULL_RTX;
4153 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4155 /* We're required to pad with trailing zeros if the requested
4156 len is greater than strlen(s2)+1. In that case try to
4157 use store_by_pieces; if it fails, punt. */
4158 if (tree_int_cst_lt (slen, len))
4160 unsigned int dest_align = get_pointer_alignment (dest);
4161 const char *p = c_getstr (src);
4162 rtx dest_mem;
4164 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4165 || !can_store_by_pieces (tree_to_uhwi (len),
4166 builtin_strncpy_read_str,
4167 CONST_CAST (char *, p),
4168 dest_align, false))
4169 return NULL_RTX;
4171 dest_mem = get_memory_rtx (dest, len);
4172 store_by_pieces (dest_mem, tree_to_uhwi (len),
4173 builtin_strncpy_read_str,
4174 CONST_CAST (char *, p), dest_align, false, 0);
4175 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4176 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4177 return dest_mem;
4180 return NULL_RTX;
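/* Illustration of the padding case handled above (assumed call):

     char d[8];
     strncpy (d, "ab", 8);   // copies 'a' and 'b', then NUL-pads
                             // d[2] through d[7]

   Since the constant length 8 exceeds strlen ("ab") + 1 == 3, the
   store is emitted via store_by_pieces when that is supported.  */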
4183 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4184 bytes from constant string DATA + OFFSET and return it as target
4185 constant. */
4187 static rtx
4188 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4189 scalar_int_mode mode)
4191 const char *c = (const char *) data;
4192 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4194 memset (p, *c, GET_MODE_SIZE (mode));
4196 return c_readstr (p, mode);
4199 /* Callback routine for store_by_pieces. Return the RTL of a register
4200 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4201 char value given in the RTL register data. For example, if mode is
4202 4 bytes wide, return the RTL for 0x01010101*data. */
4204 static rtx
4205 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4206 scalar_int_mode mode)
4208 rtx target, coeff;
4209 size_t size;
4210 char *p;
4212 size = GET_MODE_SIZE (mode);
4213 if (size == 1)
4214 return (rtx) data;
4216 p = XALLOCAVEC (char, size);
4217 memset (p, 1, size);
4218 coeff = c_readstr (p, mode);
4220 target = convert_to_mode (mode, (rtx) data, 1);
4221 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4222 return force_reg (mode, target);
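/* The multiplication trick above replicates one byte across a word by
   multiplying with a mask of 0x01 bytes. An equivalent C sketch
   (illustrative only, not part of this file):

     uint32_t replicate4 (uint8_t c) { return c * 0x01010101u; }
     // replicate4 (0xAB) == 0xABABABAB
*/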
4225 /* Expand expression EXP, which is a call to the memset builtin. Return
4226 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4227 try to get the result in TARGET, if convenient (and in mode MODE if that's
4228 convenient). */
4230 static rtx
4231 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4233 if (!validate_arglist (exp,
4234 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4235 return NULL_RTX;
4237 tree dest = CALL_EXPR_ARG (exp, 0);
4238 tree val = CALL_EXPR_ARG (exp, 1);
4239 tree len = CALL_EXPR_ARG (exp, 2);
4241 check_memop_access (exp, dest, NULL_TREE, len);
4243 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4246 /* Helper function to do the actual work for expand_builtin_memset. The
4247 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4248 so that this can also be called without constructing an actual CALL_EXPR.
4249 The other arguments and return value are the same as for
4250 expand_builtin_memset. */
4252 static rtx
4253 expand_builtin_memset_args (tree dest, tree val, tree len,
4254 rtx target, machine_mode mode, tree orig_exp)
4256 tree fndecl, fn;
4257 enum built_in_function fcode;
4258 machine_mode val_mode;
4259 char c;
4260 unsigned int dest_align;
4261 rtx dest_mem, dest_addr, len_rtx;
4262 HOST_WIDE_INT expected_size = -1;
4263 unsigned int expected_align = 0;
4264 unsigned HOST_WIDE_INT min_size;
4265 unsigned HOST_WIDE_INT max_size;
4266 unsigned HOST_WIDE_INT probable_max_size;
4268 dest_align = get_pointer_alignment (dest);
4270 /* If DEST is not a pointer type, don't do this operation in-line. */
4271 if (dest_align == 0)
4272 return NULL_RTX;
4274 if (currently_expanding_gimple_stmt)
4275 stringop_block_profile (currently_expanding_gimple_stmt,
4276 &expected_align, &expected_size);
4278 if (expected_align < dest_align)
4279 expected_align = dest_align;
4281 /* If the LEN parameter is zero, return DEST. */
4282 if (integer_zerop (len))
4284 /* Evaluate and ignore VAL in case it has side-effects. */
4285 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4286 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4289 /* Stabilize the arguments in case we fail. */
4290 dest = builtin_save_expr (dest);
4291 val = builtin_save_expr (val);
4292 len = builtin_save_expr (len);
4294 len_rtx = expand_normal (len);
4295 determine_block_size (len, len_rtx, &min_size, &max_size,
4296 &probable_max_size);
4297 dest_mem = get_memory_rtx (dest, len);
4298 val_mode = TYPE_MODE (unsigned_char_type_node);
4300 if (TREE_CODE (val) != INTEGER_CST)
4302 rtx val_rtx;
4304 val_rtx = expand_normal (val);
4305 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4307 /* Assume that we can memset by pieces if we can store
4308 * the coefficients by pieces (in the required modes).
4309 * We can't pass builtin_memset_gen_str as that emits RTL. */
4310 c = 1;
4311 if (tree_fits_uhwi_p (len)
4312 && can_store_by_pieces (tree_to_uhwi (len),
4313 builtin_memset_read_str, &c, dest_align,
4314 true))
4316 val_rtx = force_reg (val_mode, val_rtx);
4317 store_by_pieces (dest_mem, tree_to_uhwi (len),
4318 builtin_memset_gen_str, val_rtx, dest_align,
4319 true, 0);
4321 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4322 dest_align, expected_align,
4323 expected_size, min_size, max_size,
4324 probable_max_size))
4325 goto do_libcall;
4327 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4328 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4329 return dest_mem;
4332 if (target_char_cast (val, &c))
4333 goto do_libcall;
4335 if (c)
4337 if (tree_fits_uhwi_p (len)
4338 && can_store_by_pieces (tree_to_uhwi (len),
4339 builtin_memset_read_str, &c, dest_align,
4340 true))
4341 store_by_pieces (dest_mem, tree_to_uhwi (len),
4342 builtin_memset_read_str, &c, dest_align, true, 0);
4343 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4344 gen_int_mode (c, val_mode),
4345 dest_align, expected_align,
4346 expected_size, min_size, max_size,
4347 probable_max_size))
4348 goto do_libcall;
4350 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4351 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4352 return dest_mem;
4355 set_mem_align (dest_mem, dest_align);
4356 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4357 CALL_EXPR_TAILCALL (orig_exp)
4358 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4359 expected_align, expected_size,
4360 min_size, max_size,
4361 probable_max_size);
4363 if (dest_addr == 0)
4365 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4366 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4369 return dest_addr;
4371 do_libcall:
4372 fndecl = get_callee_fndecl (orig_exp);
4373 fcode = DECL_FUNCTION_CODE (fndecl);
4374 if (fcode == BUILT_IN_MEMSET)
4375 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4376 dest, val, len);
4377 else if (fcode == BUILT_IN_BZERO)
4378 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4379 dest, len);
4380 else
4381 gcc_unreachable ();
4382 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4383 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4384 return expand_call (fn, target, target == const0_rtx);
4387 /* Expand expression EXP, which is a call to the bzero builtin. Return
4388 NULL_RTX if we failed; the caller should emit a normal call. */
4390 static rtx
4391 expand_builtin_bzero (tree exp)
4393 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4394 return NULL_RTX;
4396 tree dest = CALL_EXPR_ARG (exp, 0);
4397 tree size = CALL_EXPR_ARG (exp, 1);
4399 check_memop_access (exp, dest, NULL_TREE, size);
4401 /* New argument list transforming bzero(ptr x, int y) to
4402 memset(ptr x, int 0, size_t y). This is done this way
4403 so that if it isn't expanded inline, we fall back to
4404 calling bzero instead of memset. */
4406 location_t loc = EXPR_LOCATION (exp);
4408 return expand_builtin_memset_args (dest, integer_zero_node,
4409 fold_convert_loc (loc,
4410 size_type_node, size),
4411 const0_rtx, VOIDmode, exp);
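/* The argument rewrite above corresponds to (illustrative):

     bzero (p, n);   =>   memset (p, 0, (size_t) n);

   with EXP passed along as ORIG_EXP so that if the expansion falls
   back to a library call, it is still bzero that gets called.  */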
4414 /* Try to expand cmpstr operation ICODE with the given operands.
4415 Return the result rtx on success, otherwise return null. */
4417 static rtx
4418 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4419 HOST_WIDE_INT align)
4421 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4423 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4424 target = NULL_RTX;
4426 struct expand_operand ops[4];
4427 create_output_operand (&ops[0], target, insn_mode);
4428 create_fixed_operand (&ops[1], arg1_rtx);
4429 create_fixed_operand (&ops[2], arg2_rtx);
4430 create_integer_operand (&ops[3], align);
4431 if (maybe_expand_insn (icode, 4, ops))
4432 return ops[0].value;
4433 return NULL_RTX;
4436 /* Expand expression EXP, which is a call to the memcmp built-in function.
4437 Return NULL_RTX if we failed and the caller should emit a normal call,
4438 otherwise try to get the result in TARGET, if convenient.
4439 RESULT_EQ is true if we can relax the returned value to be either zero
4440 or nonzero, without caring about the sign. */
4442 static rtx
4443 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4445 if (!validate_arglist (exp,
4446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4447 return NULL_RTX;
4449 tree arg1 = CALL_EXPR_ARG (exp, 0);
4450 tree arg2 = CALL_EXPR_ARG (exp, 1);
4451 tree len = CALL_EXPR_ARG (exp, 2);
4452 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4453 bool no_overflow = true;
4455 /* Diagnose calls where the specified length exceeds the size of either
4456 object. */
4457 tree size = compute_objsize (arg1, 0);
4458 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4459 len, /*maxread=*/NULL_TREE, size,
4460 /*objsize=*/NULL_TREE);
4461 if (no_overflow)
4463 size = compute_objsize (arg2, 0);
4464 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
4465 len, /*maxread=*/NULL_TREE, size,
4466 /*objsize=*/NULL_TREE);
4469 /* Due to the performance benefit, always inline the calls first
4470 when result_eq is false. */
4471 rtx result = NULL_RTX;
4473 if (!result_eq && fcode != BUILT_IN_BCMP && no_overflow)
4475 result = inline_expand_builtin_string_cmp (exp, target);
4476 if (result)
4477 return result;
4480 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4481 location_t loc = EXPR_LOCATION (exp);
4483 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4484 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4486 /* If we don't have POINTER_TYPE, call the function. */
4487 if (arg1_align == 0 || arg2_align == 0)
4488 return NULL_RTX;
4490 rtx arg1_rtx = get_memory_rtx (arg1, len);
4491 rtx arg2_rtx = get_memory_rtx (arg2, len);
4492 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4494 /* Set MEM_SIZE as appropriate. */
4495 if (CONST_INT_P (len_rtx))
4497 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4498 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4501 by_pieces_constfn constfn = NULL;
4503 const char *src_str = c_getstr (arg2);
4504 if (result_eq && src_str == NULL)
4506 src_str = c_getstr (arg1);
4507 if (src_str != NULL)
4508 std::swap (arg1_rtx, arg2_rtx);
4511 /* If SRC is a string constant and the block comparison would be done
4512 by pieces, we can avoid loading the string from memory
4513 and need only use the computed constants. */
4514 if (src_str
4515 && CONST_INT_P (len_rtx)
4516 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4517 constfn = builtin_memcpy_read_str;
4519 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4520 TREE_TYPE (len), target,
4521 result_eq, constfn,
4522 CONST_CAST (char *, src_str));
4524 if (result)
4526 /* Return the value in the proper mode for this function. */
4527 if (GET_MODE (result) == mode)
4528 return result;
4530 if (target != 0)
4532 convert_move (target, result, 0);
4533 return target;
4536 return convert_to_mode (mode, result, 0);
4539 return NULL_RTX;
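/* Sketch of the canonicalization above: for an equality-only test such
   as the assumed call

     memcmp ("magic", p, 6) == 0

   the operand RTXs are swapped so the constant string ends up second,
   letting the comparison read "magic" through builtin_memcpy_read_str
   instead of loading it from memory.  */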
4542 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4543 if we failed; the caller should emit a normal call, otherwise try to get
4544 the result in TARGET, if convenient. */
4546 static rtx
4547 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4549 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4550 return NULL_RTX;
4552 /* Due to the performance benefit, always inline the calls first. */
4553 rtx result = NULL_RTX;
4554 result = inline_expand_builtin_string_cmp (exp, target);
4555 if (result)
4556 return result;
4558 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4559 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4560 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4561 return NULL_RTX;
4563 tree arg1 = CALL_EXPR_ARG (exp, 0);
4564 tree arg2 = CALL_EXPR_ARG (exp, 1);
4566 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4567 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4569 /* If we don't have POINTER_TYPE, call the function. */
4570 if (arg1_align == 0 || arg2_align == 0)
4571 return NULL_RTX;
4573 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4574 arg1 = builtin_save_expr (arg1);
4575 arg2 = builtin_save_expr (arg2);
4577 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4578 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4580 /* Try to call cmpstrsi. */
4581 if (cmpstr_icode != CODE_FOR_nothing)
4582 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4583 MIN (arg1_align, arg2_align));
4585 /* Try to determine at least one length and call cmpstrnsi. */
4586 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4588 tree len;
4589 rtx arg3_rtx;
4591 tree len1 = c_strlen (arg1, 1);
4592 tree len2 = c_strlen (arg2, 1);
4594 if (len1)
4595 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4596 if (len2)
4597 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4599 /* If we don't have a constant length for the first, use the length
4600 of the second, if we know it. We don't require a constant for
4601 this case; some cost analysis could be done if both are available
4602 but neither is constant. For now, assume they're equally cheap,
4603 unless one has side effects. If both strings have constant lengths,
4604 use the smaller. */
4606 if (!len1)
4607 len = len2;
4608 else if (!len2)
4609 len = len1;
4610 else if (TREE_SIDE_EFFECTS (len1))
4611 len = len2;
4612 else if (TREE_SIDE_EFFECTS (len2))
4613 len = len1;
4614 else if (TREE_CODE (len1) != INTEGER_CST)
4615 len = len2;
4616 else if (TREE_CODE (len2) != INTEGER_CST)
4617 len = len1;
4618 else if (tree_int_cst_lt (len1, len2))
4619 len = len1;
4620 else
4621 len = len2;
4623 /* If both arguments have side effects, we cannot optimize. */
4624 if (len && !TREE_SIDE_EFFECTS (len))
4626 arg3_rtx = expand_normal (len);
4627 result = expand_cmpstrn_or_cmpmem
4628 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4629 arg3_rtx, MIN (arg1_align, arg2_align));
4633 tree fndecl = get_callee_fndecl (exp);
4634 if (result)
4636 /* Check to see if the argument was declared attribute nonstring
4637 and if so, issue a warning since at this point it's not known
4638 to be nul-terminated. */
4639 maybe_warn_nonstring_arg (fndecl, exp);
4641 /* Return the value in the proper mode for this function. */
4642 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4643 if (GET_MODE (result) == mode)
4644 return result;
4645 if (target == 0)
4646 return convert_to_mode (mode, result, 0);
4647 convert_move (target, result, 0);
4648 return target;
4651 /* Expand the library call ourselves using a stabilized argument
4652 list to avoid re-evaluating the function's arguments twice. */
4653 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4654 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4655 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4656 return expand_call (fn, target, target == const0_rtx);
4659 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4660 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4661 try to get the result in TARGET, if convenient. */
4663 static rtx
4664 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4665 ATTRIBUTE_UNUSED machine_mode mode)
4667 if (!validate_arglist (exp,
4668 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4669 return NULL_RTX;
4671 /* Due to the performance benefit, always inline the calls first. */
4672 rtx result = NULL_RTX;
4673 result = inline_expand_builtin_string_cmp (exp, target);
4674 if (result)
4675 return result;
4677 /* If c_strlen can determine an expression for one of the string
4678 lengths, and it doesn't have side effects, then emit cmpstrnsi
4679 using length MIN(strlen(string)+1, arg3). */
4680 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4681 if (cmpstrn_icode == CODE_FOR_nothing)
4682 return NULL_RTX;
4684 tree len;
4686 tree arg1 = CALL_EXPR_ARG (exp, 0);
4687 tree arg2 = CALL_EXPR_ARG (exp, 1);
4688 tree arg3 = CALL_EXPR_ARG (exp, 2);
4690 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4691 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4693 tree len1 = c_strlen (arg1, 1);
4694 tree len2 = c_strlen (arg2, 1);
4696 location_t loc = EXPR_LOCATION (exp);
4698 if (len1)
4699 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4700 if (len2)
4701 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4703 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4705 /* If we don't have a constant length for the first, use the length
4706 of the second, if we know it. If neither string is constant length,
4707 use the given length argument. We don't require a constant for
4708 this case; some cost analysis could be done if both are available
4709 but neither is constant. For now, assume they're equally cheap,
4710 unless one has side effects. If both strings have constant lengths,
4711 use the smaller. */
4713 if (!len1 && !len2)
4714 len = len3;
4715 else if (!len1)
4716 len = len2;
4717 else if (!len2)
4718 len = len1;
4719 else if (TREE_SIDE_EFFECTS (len1))
4720 len = len2;
4721 else if (TREE_SIDE_EFFECTS (len2))
4722 len = len1;
4723 else if (TREE_CODE (len1) != INTEGER_CST)
4724 len = len2;
4725 else if (TREE_CODE (len2) != INTEGER_CST)
4726 len = len1;
4727 else if (tree_int_cst_lt (len1, len2))
4728 len = len1;
4729 else
4730 len = len2;
4732 /* If we are not using the given length, we must incorporate it here.
4733 The actual new length parameter will be MIN(len,arg3) in this case. */
4734 if (len != len3)
4735 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4736 rtx arg1_rtx = get_memory_rtx (arg1, len);
4737 rtx arg2_rtx = get_memory_rtx (arg2, len);
4738 rtx arg3_rtx = expand_normal (len);
4739 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4740 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4741 MIN (arg1_align, arg2_align));
4743 tree fndecl = get_callee_fndecl (exp);
4744 if (result)
4746 /* Check to see if the argument was declared attribute nonstring
4747 and if so, issue a warning since at this point it's not known
4748 to be nul-terminated. */
4749 maybe_warn_nonstring_arg (fndecl, exp);
4751 /* Return the value in the proper mode for this function. */
4752 mode = TYPE_MODE (TREE_TYPE (exp));
4753 if (GET_MODE (result) == mode)
4754 return result;
4755 if (target == 0)
4756 return convert_to_mode (mode, result, 0);
4757 convert_move (target, result, 0);
4758 return target;
4761 /* Expand the library call ourselves using a stabilized argument
4762 list to avoid re-evaluating the function's arguments twice. */
4763 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4764 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4765 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4766 return expand_call (fn, target, target == const0_rtx);
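/* Worked example of the length selection above (assumed call): for

     strncmp (s, "abc", 16)

   len2 == strlen ("abc") + 1 == 4 while len1 is unknown, so LEN == 4;
   since that differs from the bound, the emitted comparison length is
   MIN (4, 16) == 4, which is safe because the comparison cannot
   continue past the NUL of "abc".  */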
4769 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4770 if that's convenient. */
4772 rtx
4773 expand_builtin_saveregs (void)
4775 rtx val;
4776 rtx_insn *seq;
4778 /* Don't do __builtin_saveregs more than once in a function.
4779 Save the result of the first call and reuse it. */
4780 if (saveregs_value != 0)
4781 return saveregs_value;
4783 /* When this function is called, it means that registers must be
4784 saved on entry to this function. So we migrate the call to the
4785 first insn of this function. */
4787 start_sequence ();
4789 /* Do whatever the machine needs done in this case. */
4790 val = targetm.calls.expand_builtin_saveregs ();
4792 seq = get_insns ();
4793 end_sequence ();
4795 saveregs_value = val;
4797 /* Put the insns after the NOTE that starts the function. If this
4798 is inside a start_sequence, make the outer-level insn chain current, so
4799 the code is placed at the start of the function. */
4800 push_topmost_sequence ();
4801 emit_insn_after (seq, entry_of_function ());
4802 pop_topmost_sequence ();
4804 return val;
4807 /* Expand a call to __builtin_next_arg. */
4809 static rtx
4810 expand_builtin_next_arg (void)
4812 /* Checking arguments is already done in fold_builtin_next_arg
4813 that must be called before this function. */
4814 return expand_binop (ptr_mode, add_optab,
4815 crtl->args.internal_arg_pointer,
4816 crtl->args.arg_offset_rtx,
4817 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4820 /* Make it easier for the backends by protecting the valist argument
4821 from multiple evaluations. */
4823 static tree
4824 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4826 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4828 /* The current way of determining the type of valist is completely
4829 bogus. We should have the information on the va builtin instead. */
4830 if (!vatype)
4831 vatype = targetm.fn_abi_va_list (cfun->decl);
4833 if (TREE_CODE (vatype) == ARRAY_TYPE)
4835 if (TREE_SIDE_EFFECTS (valist))
4836 valist = save_expr (valist);
4838 /* For this case, the backends will be expecting a pointer to
4839 vatype, but it's possible we've actually been given an array
4840 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4841 So fix it. */
4842 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4844 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4845 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4848 else
4850 tree pt = build_pointer_type (vatype);
4852 if (! needs_lvalue)
4854 if (! TREE_SIDE_EFFECTS (valist))
4855 return valist;
4857 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4858 TREE_SIDE_EFFECTS (valist) = 1;
4861 if (TREE_SIDE_EFFECTS (valist))
4862 valist = save_expr (valist);
4863 valist = fold_build2_loc (loc, MEM_REF,
4864 vatype, valist, build_int_cst (pt, 0));
4867 return valist;
4870 /* The "standard" definition of va_list is void*. */
4872 tree
4873 std_build_builtin_va_list (void)
4875 return ptr_type_node;
4878 /* The "standard" abi va_list is va_list_type_node. */
4880 tree
4881 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4883 return va_list_type_node;
4886 /* The "standard" type of va_list is va_list_type_node. */
4888 tree
4889 std_canonical_va_list_type (tree type)
4891 tree wtype, htype;
4893 wtype = va_list_type_node;
4894 htype = type;
4896 if (TREE_CODE (wtype) == ARRAY_TYPE)
4898 /* If va_list is an array type, the argument may have decayed
4899 to a pointer type, e.g. by being passed to another function.
4900 In that case, unwrap both types so that we can compare the
4901 underlying records. */
4902 if (TREE_CODE (htype) == ARRAY_TYPE
4903 || POINTER_TYPE_P (htype))
4905 wtype = TREE_TYPE (wtype);
4906 htype = TREE_TYPE (htype);
4909 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4910 return va_list_type_node;
4912 return NULL_TREE;
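/* Example of the decay handled above (illustrative): on targets where
   va_list is an array type, e.g.

     typedef struct __va_list_tag va_list[1];

   a va_list passed to another function decays to a pointer to the tag
   record, so both the array and the pointer forms are unwrapped to the
   underlying record before the main variants are compared.  */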
4915 /* The "standard" implementation of va_start: just assign `nextarg' to
4916 the variable. */
4918 void
4919 std_expand_builtin_va_start (tree valist, rtx nextarg)
4921 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4922 convert_move (va_r, nextarg, 0);
4925 /* Expand EXP, a call to __builtin_va_start. */
4927 static rtx
4928 expand_builtin_va_start (tree exp)
4930 rtx nextarg;
4931 tree valist;
4932 location_t loc = EXPR_LOCATION (exp);
4934 if (call_expr_nargs (exp) < 2)
4936 error_at (loc, "too few arguments to function %<va_start%>");
4937 return const0_rtx;
4940 if (fold_builtin_next_arg (exp, true))
4941 return const0_rtx;
4943 nextarg = expand_builtin_next_arg ();
4944 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4946 if (targetm.expand_builtin_va_start)
4947 targetm.expand_builtin_va_start (valist, nextarg);
4948 else
4949 std_expand_builtin_va_start (valist, nextarg);
4951 return const0_rtx;
4954 /* Expand EXP, a call to __builtin_va_end. */
4956 static rtx
4957 expand_builtin_va_end (tree exp)
4959 tree valist = CALL_EXPR_ARG (exp, 0);
4961 /* Evaluate for side effects, if needed. I hate macros that don't
4962 do that. */
4963 if (TREE_SIDE_EFFECTS (valist))
4964 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4966 return const0_rtx;
4969 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4970 builtin rather than just as an assignment in stdarg.h because of the
4971 nastiness of array-type va_list types. */
4973 static rtx
4974 expand_builtin_va_copy (tree exp)
4976 tree dst, src, t;
4977 location_t loc = EXPR_LOCATION (exp);
4979 dst = CALL_EXPR_ARG (exp, 0);
4980 src = CALL_EXPR_ARG (exp, 1);
4982 dst = stabilize_va_list_loc (loc, dst, 1);
4983 src = stabilize_va_list_loc (loc, src, 0);
4985 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4987 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4989 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4990 TREE_SIDE_EFFECTS (t) = 1;
4991 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4993 else
4995 rtx dstb, srcb, size;
4997 /* Evaluate to pointers. */
4998 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4999 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5000 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5001 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5003 dstb = convert_memory_address (Pmode, dstb);
5004 srcb = convert_memory_address (Pmode, srcb);
5006 /* "Dereference" to BLKmode memories. */
5007 dstb = gen_rtx_MEM (BLKmode, dstb);
5008 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5009 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5010 srcb = gen_rtx_MEM (BLKmode, srcb);
5011 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5012 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5014 /* Copy. */
5015 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5018 return const0_rtx;
5021 /* Expand a call to one of the builtin functions __builtin_frame_address or
5022 __builtin_return_address. */
5024 static rtx
5025 expand_builtin_frame_address (tree fndecl, tree exp)
5027 /* The argument must be a nonnegative integer constant.
5028 It counts the number of frames to scan up the stack.
5029 The value is either the frame pointer value or the return
5030 address saved in that frame. */
5031 if (call_expr_nargs (exp) == 0)
5032 /* Warning about missing arg was already issued. */
5033 return const0_rtx;
5034 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5036 error ("invalid argument to %qD", fndecl);
5037 return const0_rtx;
5039 else
5041 /* Number of frames to scan up the stack. */
5042 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5044 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5046 /* Some ports cannot access arbitrary stack frames. */
5047 if (tem == NULL)
5049 warning (0, "unsupported argument to %qD", fndecl);
5050 return const0_rtx;
5053 if (count)
5055 /* Warn since no effort is made to ensure that any frame
5056 beyond the current one exists or can be safely reached. */
5057 warning (OPT_Wframe_address, "calling %qD with "
5058 "a nonzero argument is unsafe", fndecl);
5061 /* For __builtin_frame_address, return what we've got. */
5062 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5063 return tem;
5065 if (!REG_P (tem)
5066 && ! CONSTANT_P (tem))
5067 tem = copy_addr_to_reg (tem);
5068 return tem;
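/* For example (hypothetical user code):

     void *fp = __builtin_frame_address (0);   // current frame; fine
     void *up = __builtin_frame_address (2);   // -Wframe-address:
                                               // outer frames may not
                                               // be safely reachable
*/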
5072 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5073 failed and the caller should emit a normal call. */
5075 static rtx
5076 expand_builtin_alloca (tree exp)
5078 rtx op0;
5079 rtx result;
5080 unsigned int align;
5081 tree fndecl = get_callee_fndecl (exp);
5082 HOST_WIDE_INT max_size;
5083 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5084 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5085 bool valid_arglist
5086 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5087 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5088 VOID_TYPE)
5089 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5090 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5091 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5093 if (!valid_arglist)
5094 return NULL_RTX;
5096 if ((alloca_for_var
5097 && warn_vla_limit >= HOST_WIDE_INT_MAX
5098 && warn_alloc_size_limit < warn_vla_limit)
5099 || (!alloca_for_var
5100 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5101 && warn_alloc_size_limit < warn_alloca_limit
5104 /* -Walloca-larger-than and -Wvla-larger-than settings of
5105 less than HOST_WIDE_INT_MAX override the more general
5106 -Walloc-size-larger-than so unless either of the former
5107 options is smaller than the last one (which would imply
5108 that the call was already checked), check the alloca
5109 arguments for overflow. */
5110 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5111 int idx[] = { 0, -1 };
5112 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5115 /* Compute the argument. */
5116 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5118 /* Compute the alignment. */
5119 align = (fcode == BUILT_IN_ALLOCA
5120 ? BIGGEST_ALIGNMENT
5121 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5123 /* Compute the maximum size. */
5124 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5125 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5126 : -1);
5128 /* Allocate the desired space. If the allocation stems from the declaration
5129 of a variable-sized object, it cannot accumulate. */
5130 result
5131 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5132 result = convert_memory_address (ptr_mode, result);
5134 return result;
5137 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second
5138 argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
5139 is the STACK_DYNAMIC_OFFSET value. See the motivation for this in the
5140 comment on the handle_builtin_stack_restore function. */
5142 static rtx
5143 expand_asan_emit_allocas_unpoison (tree exp)
5145 tree arg0 = CALL_EXPR_ARG (exp, 0);
5146 tree arg1 = CALL_EXPR_ARG (exp, 1);
5147 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5148 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5149 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5150 stack_pointer_rtx, NULL_RTX, 0,
5151 OPTAB_LIB_WIDEN);
5152 off = convert_modes (ptr_mode, Pmode, off, 0);
5153 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5154 OPTAB_LIB_WIDEN);
5155 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5156 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5157 top, ptr_mode, bot, ptr_mode);
5158 return ret;
5161 /* Expand a call to the bswap builtin in EXP.
5162 Return NULL_RTX if a normal call should be emitted rather than expanding the
5163 function in-line. If convenient, the result should be placed in TARGET.
5164 SUBTARGET may be used as the target for computing one of EXP's operands. */
5166 static rtx
5167 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5168 rtx subtarget)
5170 tree arg;
5171 rtx op0;
5173 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5174 return NULL_RTX;
5176 arg = CALL_EXPR_ARG (exp, 0);
5177 op0 = expand_expr (arg,
5178 subtarget && GET_MODE (subtarget) == target_mode
5179 ? subtarget : NULL_RTX,
5180 target_mode, EXPAND_NORMAL);
5181 if (GET_MODE (op0) != target_mode)
5182 op0 = convert_to_mode (target_mode, op0, 1);
5184 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5186 gcc_assert (target);
5188 return convert_to_mode (target_mode, target, 1);
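/* For example (illustrative): __builtin_bswap32 (0x11223344) goes
   through bswap_optab and yields 0x44332211.  */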
5191 /* Expand a call to a unary builtin in EXP.
5192 Return NULL_RTX if a normal call should be emitted rather than expanding the
5193 function in-line. If convenient, the result should be placed in TARGET.
5194 SUBTARGET may be used as the target for computing one of EXP's operands. */
5196 static rtx
5197 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5198 rtx subtarget, optab op_optab)
5200 rtx op0;
5202 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5203 return NULL_RTX;
5205 /* Compute the argument. */
5206 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5207 (subtarget
5208 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5209 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5210 VOIDmode, EXPAND_NORMAL);
5211 /* Compute op, into TARGET if possible.
5212 Set TARGET to wherever the result comes back. */
5213 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5214 op_optab, op0, target, op_optab != clrsb_optab);
5215 gcc_assert (target);
5217 return convert_to_mode (target_mode, target, 0);
5220 /* Expand a call to __builtin_expect. We just return our argument
5221 as the builtin_expect semantics should already have been executed by
5222 the tree branch prediction pass. */
5224 static rtx
5225 expand_builtin_expect (tree exp, rtx target)
5227 tree arg;
5229 if (call_expr_nargs (exp) < 2)
5230 return const0_rtx;
5231 arg = CALL_EXPR_ARG (exp, 0);
5233 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5234 /* When guessing was done, the hints should be already stripped away. */
5235 gcc_assert (!flag_guess_branch_prob
5236 || optimize == 0 || seen_error ());
5237 return target;
5240 /* Expand a call to __builtin_assume_aligned. We just return our first
5241 argument as the builtin_assume_aligned semantics should already have
5242 been executed by CCP. */
5244 static rtx
5245 expand_builtin_assume_aligned (tree exp, rtx target)
5247 if (call_expr_nargs (exp) < 2)
5248 return const0_rtx;
5249 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5250 EXPAND_NORMAL);
5251 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5252 && (call_expr_nargs (exp) < 3
5253 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5254 return target;
5257 void
5258 expand_builtin_trap (void)
5260 if (targetm.have_trap ())
5262 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5263 /* For trap insns when not accumulating outgoing args force
5264 REG_ARGS_SIZE note to prevent crossjumping of calls with
5265 different args sizes. */
5266 if (!ACCUMULATE_OUTGOING_ARGS)
5267 add_args_size_note (insn, stack_pointer_delta);
5269 else
5271 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5272 tree call_expr = build_call_expr (fn, 0);
5273 expand_call (call_expr, NULL_RTX, false);
5276 emit_barrier ();
5279 /* Expand a call to __builtin_unreachable. We do nothing except emit
5280 a barrier saying that control flow will not pass here.
5282 It is the responsibility of the program being compiled to ensure
5283 that control flow never reaches __builtin_unreachable. */
5284 static void
5285 expand_builtin_unreachable (void)
5287 emit_barrier ();
5290 /* Expand EXP, a call to fabs, fabsf or fabsl.
5291 Return NULL_RTX if a normal call should be emitted rather than expanding
5292 the function inline. If convenient, the result should be placed
5293 in TARGET. SUBTARGET may be used as the target for computing
5294 the operand. */
5296 static rtx
5297 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5299 machine_mode mode;
5300 tree arg;
5301 rtx op0;
5303 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5304 return NULL_RTX;
5306 arg = CALL_EXPR_ARG (exp, 0);
5307 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5308 mode = TYPE_MODE (TREE_TYPE (arg));
5309 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5310 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5313 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5314 Return NULL_RTX if a normal call should be emitted rather than expanding the
5315 function inline. If convenient, the result should be placed in TARGET.
5316 SUBTARGET may be used as the target for computing the operand. */
5318 static rtx
5319 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5321 rtx op0, op1;
5322 tree arg;
5324 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5325 return NULL_RTX;
5327 arg = CALL_EXPR_ARG (exp, 0);
5328 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5330 arg = CALL_EXPR_ARG (exp, 1);
5331 op1 = expand_normal (arg);
5333 return expand_copysign (op0, op1, target);
5336 /* Expand a call to __builtin___clear_cache. */
5338 static rtx
5339 expand_builtin___clear_cache (tree exp)
5341 if (!targetm.code_for_clear_cache)
5343 #ifdef CLEAR_INSN_CACHE
5344 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5345 does something. Just do the default expansion to a call to
5346 __clear_cache(). */
5347 return NULL_RTX;
5348 #else
5349 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5350 does nothing. There is no need to call it. Do nothing. */
5351 return const0_rtx;
5352 #endif /* CLEAR_INSN_CACHE */
5355 /* We have a "clear_cache" insn, and it will handle everything. */
5356 tree begin, end;
5357 rtx begin_rtx, end_rtx;
5359 /* We must not expand to a library call. If we did, any
5360 fallback library function in libgcc that might contain a call to
5361 __builtin___clear_cache() would recurse infinitely. */
5362 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5364 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5365 return const0_rtx;
5368 if (targetm.have_clear_cache ())
5370 struct expand_operand ops[2];
5372 begin = CALL_EXPR_ARG (exp, 0);
5373 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5375 end = CALL_EXPR_ARG (exp, 1);
5376 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5378 create_address_operand (&ops[0], begin_rtx);
5379 create_address_operand (&ops[1], end_rtx);
5380 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5381 return const0_rtx;
5383 return const0_rtx;
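/* Typical use, for illustration (a sketch; code and size are hypothetical):

     __builtin___clear_cache (code, code + size);

   On targets with a clear_cache insn this expands inline; otherwise it
   falls back to the __clear_cache libcall, or to nothing at all when the
   target's instruction cache needs no flushing.  */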
5386 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5388 static rtx
5389 round_trampoline_addr (rtx tramp)
5391 rtx temp, addend, mask;
5393 /* If we don't need too much alignment, we'll have been guaranteed
5394 proper alignment by get_trampoline_type. */
5395 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5396 return tramp;
5398 /* Round address up to desired boundary. */
5399 temp = gen_reg_rtx (Pmode);
5400 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5401 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5403 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5404 temp, 0, OPTAB_LIB_WIDEN);
5405 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5406 temp, 0, OPTAB_LIB_WIDEN);
5408 return tramp;
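/* Worked example (a sketch): with TRAMPOLINE_ALIGNMENT of 64 bits
   (8 bytes), an address of 0x1003 is rounded up as

     (0x1003 + 7) & -8  ==  0x100a & ~7  ==  0x1008.  */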
5411 static rtx
5412 expand_builtin_init_trampoline (tree exp, bool onstack)
5414 tree t_tramp, t_func, t_chain;
5415 rtx m_tramp, r_tramp, r_chain, tmp;
5417 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5418 POINTER_TYPE, VOID_TYPE))
5419 return NULL_RTX;
5421 t_tramp = CALL_EXPR_ARG (exp, 0);
5422 t_func = CALL_EXPR_ARG (exp, 1);
5423 t_chain = CALL_EXPR_ARG (exp, 2);
5425 r_tramp = expand_normal (t_tramp);
5426 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5427 MEM_NOTRAP_P (m_tramp) = 1;
5429 /* If ONSTACK, the TRAMP argument should be the address of a field
5430 within the local function's FRAME decl. Either way, let's see if
5431 we can fill in the MEM_ATTRs for this memory. */
5432 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5433 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5435 /* Creator of a heap trampoline is responsible for making sure the
5436 address is aligned to at least STACK_BOUNDARY. Normally malloc
5437 will ensure this anyhow. */
5438 tmp = round_trampoline_addr (r_tramp);
5439 if (tmp != r_tramp)
5441 m_tramp = change_address (m_tramp, BLKmode, tmp);
5442 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5443 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5446 /* The FUNC argument should be the address of the nested function.
5447 Extract the actual function decl to pass to the hook. */
5448 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5449 t_func = TREE_OPERAND (t_func, 0);
5450 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5452 r_chain = expand_normal (t_chain);
5454 /* Generate insns to initialize the trampoline. */
5455 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5457 if (onstack)
5459 trampolines_created = 1;
5461 if (targetm.calls.custom_function_descriptors != 0)
5462 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5463 "trampoline generated for nested function %qD", t_func);
5466 return const0_rtx;
5469 static rtx
5470 expand_builtin_adjust_trampoline (tree exp)
5472 rtx tramp;
5474 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5475 return NULL_RTX;
5477 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5478 tramp = round_trampoline_addr (tramp);
5479 if (targetm.calls.trampoline_adjust_address)
5480 tramp = targetm.calls.trampoline_adjust_address (tramp);
5482 return tramp;
5485 /* Expand a call to the builtin descriptor initialization routine.
5486 A descriptor is made up of a couple of pointers to the static
5487 chain and the code entry in this order. */
5489 static rtx
5490 expand_builtin_init_descriptor (tree exp)
5492 tree t_descr, t_func, t_chain;
5493 rtx m_descr, r_descr, r_func, r_chain;
5495 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5496 VOID_TYPE))
5497 return NULL_RTX;
5499 t_descr = CALL_EXPR_ARG (exp, 0);
5500 t_func = CALL_EXPR_ARG (exp, 1);
5501 t_chain = CALL_EXPR_ARG (exp, 2);
5503 r_descr = expand_normal (t_descr);
5504 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5505 MEM_NOTRAP_P (m_descr) = 1;
5507 r_func = expand_normal (t_func);
5508 r_chain = expand_normal (t_chain);
5510 /* Generate insns to initialize the descriptor. */
5511 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5512 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5513 POINTER_SIZE / BITS_PER_UNIT), r_func);
5515 return const0_rtx;
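/* The resulting in-memory layout is simply (a sketch, assuming 64-bit
   pointers):

     descr + 0:  static chain value
     descr + 8:  code address of the nested function  */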
5518 /* Expand a call to the builtin descriptor adjustment routine. */
5520 static rtx
5521 expand_builtin_adjust_descriptor (tree exp)
5523 rtx tramp;
5525 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5526 return NULL_RTX;
5528 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5530 /* Unalign the descriptor to allow runtime identification. */
5531 tramp = plus_constant (ptr_mode, tramp,
5532 targetm.calls.custom_function_descriptors);
5534 return force_operand (tramp, NULL_RTX);
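/* For illustration (a sketch): with custom_function_descriptors == 1, a
   descriptor at address 0x2000 is advertised as 0x2001, so an indirect
   call sequence can test the low bit to distinguish descriptors from
   ordinary (even-aligned) function addresses.  */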
5537 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5538 function. The function first checks whether the back end provides
5539 an insn to implement signbit for the respective mode. If not, it
5540 checks whether the floating point format of the value is such that
5541 the sign bit can be extracted. If that is not the case, error out.
5542 EXP is the expression that is a call to the builtin function; if
5543 convenient, the result should be placed in TARGET. */
5544 static rtx
5545 expand_builtin_signbit (tree exp, rtx target)
5547 const struct real_format *fmt;
5548 scalar_float_mode fmode;
5549 scalar_int_mode rmode, imode;
5550 tree arg;
5551 int word, bitpos;
5552 enum insn_code icode;
5553 rtx temp;
5554 location_t loc = EXPR_LOCATION (exp);
5556 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5557 return NULL_RTX;
5559 arg = CALL_EXPR_ARG (exp, 0);
5560 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5561 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5562 fmt = REAL_MODE_FORMAT (fmode);
5564 arg = builtin_save_expr (arg);
5566 /* Expand the argument yielding a RTX expression. */
5567 temp = expand_normal (arg);
5569 /* Check if the back end provides an insn that handles signbit for the
5570 argument's mode. */
5571 icode = optab_handler (signbit_optab, fmode);
5572 if (icode != CODE_FOR_nothing)
5574 rtx_insn *last = get_last_insn ();
5575 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5576 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5577 return target;
5578 delete_insns_since (last);
5581 /* For floating point formats without a sign bit, implement signbit
5582 as "ARG < 0.0". */
5583 bitpos = fmt->signbit_ro;
5584 if (bitpos < 0)
5586 /* But we can't do this if the format supports signed zero. */
5587 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5589 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5590 build_real (TREE_TYPE (arg), dconst0));
5591 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5594 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5596 imode = int_mode_for_mode (fmode).require ();
5597 temp = gen_lowpart (imode, temp);
5599 else
5601 imode = word_mode;
5602 /* Handle targets with different FP word orders. */
5603 if (FLOAT_WORDS_BIG_ENDIAN)
5604 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5605 else
5606 word = bitpos / BITS_PER_WORD;
5607 temp = operand_subword_force (temp, word, fmode);
5608 bitpos = bitpos % BITS_PER_WORD;
5611 /* Force the intermediate word_mode (or narrower) result into a
5612 register. This avoids attempting to create paradoxical SUBREGs
5613 of floating point modes below. */
5614 temp = force_reg (imode, temp);
5616 /* If the bitpos is within the "result mode" lowpart, the operation
5617 can be implemented with a single bitwise AND. Otherwise, we need
5618 a right shift and an AND. */
5620 if (bitpos < GET_MODE_BITSIZE (rmode))
5622 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5624 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5625 temp = gen_lowpart (rmode, temp);
5626 temp = expand_binop (rmode, and_optab, temp,
5627 immed_wide_int_const (mask, rmode),
5628 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5630 else
5632 /* Perform a logical right shift to place the signbit in the least
5633 significant bit, then truncate the result to the desired mode
5634 and mask just this bit. */
5635 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5636 temp = gen_lowpart (rmode, temp);
5637 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5638 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5641 return temp;
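/* Worked example (a sketch): for IEEE double on a typical 64-bit target,
   fmt->signbit_ro is 63 and rmode is the 32-bit int mode, so the bit does
   not fit in the lowpart and the shift path above is used:

     temp = (DImode view of the argument) >> 63;
     return (int) temp & 1;  */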
5644 /* Expand fork or exec calls. TARGET is the desired target of the
5645 call. EXP is the call. FN is the declaration of the
5646 actual function. IGNORE is nonzero if the
5647 value is to be ignored. */
5649 static rtx
5650 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5652 tree id, decl;
5653 tree call;
5655 /* If we are not profiling, just call the function. */
5656 if (!profile_arc_flag)
5657 return NULL_RTX;
5659 /* Otherwise call the wrapper. This should be equivalent for the rest of
5660 the compiler, so the code does not diverge, and the wrapper may run the
5661 code necessary for keeping the profiling sane. */
5663 switch (DECL_FUNCTION_CODE (fn))
5665 case BUILT_IN_FORK:
5666 id = get_identifier ("__gcov_fork");
5667 break;
5669 case BUILT_IN_EXECL:
5670 id = get_identifier ("__gcov_execl");
5671 break;
5673 case BUILT_IN_EXECV:
5674 id = get_identifier ("__gcov_execv");
5675 break;
5677 case BUILT_IN_EXECLP:
5678 id = get_identifier ("__gcov_execlp");
5679 break;
5681 case BUILT_IN_EXECLE:
5682 id = get_identifier ("__gcov_execle");
5683 break;
5685 case BUILT_IN_EXECVP:
5686 id = get_identifier ("__gcov_execvp");
5687 break;
5689 case BUILT_IN_EXECVE:
5690 id = get_identifier ("__gcov_execve");
5691 break;
5693 default:
5694 gcc_unreachable ();
5697 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5698 FUNCTION_DECL, id, TREE_TYPE (fn));
5699 DECL_EXTERNAL (decl) = 1;
5700 TREE_PUBLIC (decl) = 1;
5701 DECL_ARTIFICIAL (decl) = 1;
5702 TREE_NOTHROW (decl) = 1;
5703 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5704 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5705 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5706 return expand_call (call, target, ignore);
5711 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5712 the pointer in these functions is void*, the tree optimizers may remove
5713 casts. The mode computed in expand_builtin isn't reliable either, due
5714 to __sync_bool_compare_and_swap.
5716 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5717 group of builtins. This gives us log2 of the mode size. */
5719 static inline machine_mode
5720 get_builtin_sync_mode (int fcode_diff)
5722 /* The size is not negotiable, so ask not to get BLKmode in return
5723 if the target indicates that a smaller size would be better. */
5724 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
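/* For example (a sketch): for __sync_fetch_and_add_4, FCODE_DIFF is 2, so
   the mode requested is the one with 8 << 2 = 32 bits (assuming
   BITS_PER_UNIT == 8) -- SImode on most targets.  */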
5727 /* Expand the memory expression LOC and return the appropriate memory operand
5728 for the builtin_sync operations. */
5730 static rtx
5731 get_builtin_sync_mem (tree loc, machine_mode mode)
5733 rtx addr, mem;
5735 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5736 addr = convert_memory_address (Pmode, addr);
5738 /* Note that we explicitly do not want any alias information for this
5739 memory, so that we kill all other live memories. Otherwise we don't
5740 satisfy the full barrier semantics of the intrinsic. */
5741 mem = validize_mem (gen_rtx_MEM (mode, addr));
5743 /* The memory must be aligned at least to the alignment of the mode. */
5744 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5745 get_pointer_alignment (loc)));
5746 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5747 MEM_VOLATILE_P (mem) = 1;
5749 return mem;
5752 /* Make sure an argument is in the right mode.
5753 EXP is the tree argument.
5754 MODE is the mode it should be in. */
5756 static rtx
5757 expand_expr_force_mode (tree exp, machine_mode mode)
5759 rtx val;
5760 machine_mode old_mode;
5762 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5763 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5764 of CONST_INTs, where we know the old_mode only from the call argument. */
5766 old_mode = GET_MODE (val);
5767 if (old_mode == VOIDmode)
5768 old_mode = TYPE_MODE (TREE_TYPE (exp));
5769 val = convert_modes (mode, old_mode, val, 1);
5770 return val;
5774 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5775 EXP is the CALL_EXPR. CODE is the rtx code
5776 that corresponds to the arithmetic or logical operation from the name;
5777 an exception here is that NOT actually means NAND. TARGET is an optional
5778 place for us to store the results; AFTER is true if this is the
5779 fetch_and_xxx form. */
5781 static rtx
5782 expand_builtin_sync_operation (machine_mode mode, tree exp,
5783 enum rtx_code code, bool after,
5784 rtx target)
5786 rtx val, mem;
5787 location_t loc = EXPR_LOCATION (exp);
5789 if (code == NOT && warn_sync_nand)
5791 tree fndecl = get_callee_fndecl (exp);
5792 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5794 static bool warned_f_a_n, warned_n_a_f;
5796 switch (fcode)
5798 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5799 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5800 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5801 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5802 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5803 if (warned_f_a_n)
5804 break;
5806 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5807 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5808 warned_f_a_n = true;
5809 break;
5811 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5812 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5813 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5814 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5815 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5816 if (warned_n_a_f)
5817 break;
5819 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5820 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5821 warned_n_a_f = true;
5822 break;
5824 default:
5825 gcc_unreachable ();
5829 /* Expand the operands. */
5830 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5831 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5833 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5834 after);
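/* The GCC 4.4 semantic change referred to above is, for illustration
   (a sketch):

     __sync_fetch_and_nand (p, v)  is  { tmp = *p; *p = ~(tmp & v); return tmp; }

   whereas GCC 4.3 and earlier implemented *p = ~tmp & v.  */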
5837 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5838 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5839 true if this is the boolean form. TARGET is a place for us to store the
5840 results; this is NOT optional if IS_BOOL is true. */
5842 static rtx
5843 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5844 bool is_bool, rtx target)
5846 rtx old_val, new_val, mem;
5847 rtx *pbool, *poval;
5849 /* Expand the operands. */
5850 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5851 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5852 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5854 pbool = poval = NULL;
5855 if (target != const0_rtx)
5857 if (is_bool)
5858 pbool = &target;
5859 else
5860 poval = &target;
5862 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5863 false, MEMMODEL_SYNC_SEQ_CST,
5864 MEMMODEL_SYNC_SEQ_CST))
5865 return NULL_RTX;
5867 return target;
5870 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5871 general form is actually an atomic exchange, and some targets only
5872 support a reduced form with the second argument being a constant 1.
5873 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5874 the results. */
5876 static rtx
5877 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5878 rtx target)
5880 rtx val, mem;
5882 /* Expand the operands. */
5883 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5884 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5886 return expand_sync_lock_test_and_set (target, mem, val);
5889 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5891 static void
5892 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5894 rtx mem;
5896 /* Expand the operands. */
5897 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5899 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5902 /* Given an integer representing an ``enum memmodel'', verify its
5903 correctness and return the memory model enum. */
5905 static enum memmodel
5906 get_memmodel (tree exp)
5908 rtx op;
5909 unsigned HOST_WIDE_INT val;
5910 source_location loc
5911 = expansion_point_location_if_in_system_header (input_location);
5913 /* If the parameter is not a constant, it's a run time value so we'll just
5914 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5915 if (TREE_CODE (exp) != INTEGER_CST)
5916 return MEMMODEL_SEQ_CST;
5918 op = expand_normal (exp);
5920 val = INTVAL (op);
5921 if (targetm.memmodel_check)
5922 val = targetm.memmodel_check (val);
5923 else if (val & ~MEMMODEL_MASK)
5925 warning_at (loc, OPT_Winvalid_memory_model,
5926 "unknown architecture specifier in memory model to builtin");
5927 return MEMMODEL_SEQ_CST;
5930 /* We should never see an explicit user SYNC memory model, so >= LAST works. */
5931 if (memmodel_base (val) >= MEMMODEL_LAST)
5933 warning_at (loc, OPT_Winvalid_memory_model,
5934 "invalid memory model argument to builtin");
5935 return MEMMODEL_SEQ_CST;
5938 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5939 be conservative and promote consume to acquire. */
5940 if (val == MEMMODEL_CONSUME)
5941 val = MEMMODEL_ACQUIRE;
5943 return (enum memmodel) val;
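/* For example (a sketch): a call such as

     __atomic_load_n (p, __ATOMIC_CONSUME)

   reaches this point with MEMMODEL_CONSUME and is expanded as if
   __ATOMIC_ACQUIRE had been given (the PR 59448 workaround above).  */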
5946 /* Expand the __atomic_exchange intrinsic:
5947 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5948 EXP is the CALL_EXPR.
5949 TARGET is an optional place for us to store the results. */
5951 static rtx
5952 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5954 rtx val, mem;
5955 enum memmodel model;
5957 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5959 if (!flag_inline_atomics)
5960 return NULL_RTX;
5962 /* Expand the operands. */
5963 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5964 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5966 return expand_atomic_exchange (target, mem, val, model);
5969 /* Expand the __atomic_compare_exchange intrinsic:
5970 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5971 TYPE desired, BOOL weak,
5972 enum memmodel success,
5973 enum memmodel failure)
5974 EXP is the CALL_EXPR.
5975 TARGET is an optional place for us to store the results. */
5977 static rtx
5978 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5979 rtx target)
5981 rtx expect, desired, mem, oldval;
5982 rtx_code_label *label;
5983 enum memmodel success, failure;
5984 tree weak;
5985 bool is_weak;
5986 source_location loc
5987 = expansion_point_location_if_in_system_header (input_location);
5989 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5990 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5992 if (failure > success)
5994 warning_at (loc, OPT_Winvalid_memory_model,
5995 "failure memory model cannot be stronger than success "
5996 "memory model for %<__atomic_compare_exchange%>");
5997 success = MEMMODEL_SEQ_CST;
6000 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6002 warning_at (loc, OPT_Winvalid_memory_model,
6003 "invalid failure memory model for "
6004 "%<__atomic_compare_exchange%>");
6005 failure = MEMMODEL_SEQ_CST;
6006 success = MEMMODEL_SEQ_CST;
6010 if (!flag_inline_atomics)
6011 return NULL_RTX;
6013 /* Expand the operands. */
6014 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6016 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6017 expect = convert_memory_address (Pmode, expect);
6018 expect = gen_rtx_MEM (mode, expect);
6019 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6021 weak = CALL_EXPR_ARG (exp, 3);
6022 is_weak = false;
6023 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6024 is_weak = true;
6026 if (target == const0_rtx)
6027 target = NULL;
6029 /* Lest the rtl backend create a race condition with an improper store
6030 to memory, always create a new pseudo for OLDVAL. */
6031 oldval = NULL;
6033 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6034 is_weak, success, failure))
6035 return NULL_RTX;
6037 /* Conditionally store back to EXPECT, lest we create a race condition
6038 with an improper store to memory. */
6039 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6040 the normal case where EXPECT is totally private, i.e. a register. At
6041 which point the store can be unconditional. */
6042 label = gen_label_rtx ();
6043 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6044 GET_MODE (target), 1, label);
6045 emit_move_insn (expect, oldval);
6046 emit_label (label);
6048 return target;
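/* The generated sequence therefore implements, for illustration
   (a sketch):

     bool ok = CAS (object, *expect, desired);
     if (!ok)
       *expect = value read from object;
     return ok;

   i.e. EXPECT is written back only on failure, as required.  */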
6051 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6052 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6053 call. The weak parameter must be dropped to match the expected parameter
6054 list and the expected argument changed from value to pointer to memory
6055 slot. */
6057 static void
6058 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6060 unsigned int z;
6061 vec<tree, va_gc> *vec;
6063 vec_alloc (vec, 5);
6064 vec->quick_push (gimple_call_arg (call, 0));
6065 tree expected = gimple_call_arg (call, 1);
6066 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6067 TREE_TYPE (expected));
6068 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6069 if (expd != x)
6070 emit_move_insn (x, expd);
6071 tree v = make_tree (TREE_TYPE (expected), x);
6072 vec->quick_push (build1 (ADDR_EXPR,
6073 build_pointer_type (TREE_TYPE (expected)), v));
6074 vec->quick_push (gimple_call_arg (call, 2));
6075 /* Skip the boolean weak parameter. */
6076 for (z = 4; z < 6; z++)
6077 vec->quick_push (gimple_call_arg (call, z));
6078 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6079 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6080 gcc_assert (bytes_log2 < 5);
6081 built_in_function fncode
6082 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6083 + bytes_log2);
6084 tree fndecl = builtin_decl_explicit (fncode);
6085 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6086 fndecl);
6087 tree exp = build_call_vec (boolean_type_node, fn, vec);
6088 tree lhs = gimple_call_lhs (call);
6089 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6090 if (lhs)
6092 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6093 if (GET_MODE (boolret) != mode)
6094 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6095 x = force_reg (mode, x);
6096 write_complex_part (target, boolret, true);
6097 write_complex_part (target, x, false);
6101 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6103 void
6104 expand_ifn_atomic_compare_exchange (gcall *call)
6106 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6107 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6108 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6109 rtx expect, desired, mem, oldval, boolret;
6110 enum memmodel success, failure;
6111 tree lhs;
6112 bool is_weak;
6113 source_location loc
6114 = expansion_point_location_if_in_system_header (gimple_location (call));
6116 success = get_memmodel (gimple_call_arg (call, 4));
6117 failure = get_memmodel (gimple_call_arg (call, 5));
6119 if (failure > success)
6121 warning_at (loc, OPT_Winvalid_memory_model,
6122 "failure memory model cannot be stronger than success "
6123 "memory model for %<__atomic_compare_exchange%>");
6124 success = MEMMODEL_SEQ_CST;
6127 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6129 warning_at (loc, OPT_Winvalid_memory_model,
6130 "invalid failure memory model for "
6131 "%<__atomic_compare_exchange%>");
6132 failure = MEMMODEL_SEQ_CST;
6133 success = MEMMODEL_SEQ_CST;
6136 if (!flag_inline_atomics)
6138 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6139 return;
6142 /* Expand the operands. */
6143 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6145 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6146 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6148 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6150 boolret = NULL;
6151 oldval = NULL;
6153 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6154 is_weak, success, failure))
6156 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6157 return;
6160 lhs = gimple_call_lhs (call);
6161 if (lhs)
6163 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6164 if (GET_MODE (boolret) != mode)
6165 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6166 write_complex_part (target, boolret, true);
6167 write_complex_part (target, oldval, false);
6171 /* Expand the __atomic_load intrinsic:
6172 TYPE __atomic_load (TYPE *object, enum memmodel)
6173 EXP is the CALL_EXPR.
6174 TARGET is an optional place for us to store the results. */
6176 static rtx
6177 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6179 rtx mem;
6180 enum memmodel model;
6182 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6183 if (is_mm_release (model) || is_mm_acq_rel (model))
6185 source_location loc
6186 = expansion_point_location_if_in_system_header (input_location);
6187 warning_at (loc, OPT_Winvalid_memory_model,
6188 "invalid memory model for %<__atomic_load%>");
6189 model = MEMMODEL_SEQ_CST;
6192 if (!flag_inline_atomics)
6193 return NULL_RTX;
6195 /* Expand the operand. */
6196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6198 return expand_atomic_load (target, mem, model);
6202 /* Expand the __atomic_store intrinsic:
6203 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6204 EXP is the CALL_EXPR.
6205 TARGET is an optional place for us to store the results. */
6207 static rtx
6208 expand_builtin_atomic_store (machine_mode mode, tree exp)
6210 rtx mem, val;
6211 enum memmodel model;
6213 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6214 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6215 || is_mm_release (model)))
6217 source_location loc
6218 = expansion_point_location_if_in_system_header (input_location);
6219 warning_at (loc, OPT_Winvalid_memory_model,
6220 "invalid memory model for %<__atomic_store%>");
6221 model = MEMMODEL_SEQ_CST;
6224 if (!flag_inline_atomics)
6225 return NULL_RTX;
6227 /* Expand the operands. */
6228 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6229 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6231 return expand_atomic_store (mem, val, model, false);
6234 /* Expand the __atomic_fetch_XXX intrinsic:
6235 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6236 EXP is the CALL_EXPR.
6237 TARGET is an optional place for us to store the results.
6238 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
6239 FETCH_AFTER is true if returning the result of the operation.
6240 FETCH_AFTER is false if returning the value before the operation.
6241 IGNORE is true if the result is not used.
6242 EXT_CALL is the correct builtin for an external call if this cannot be
6243 resolved to an instruction sequence. */
6245 static rtx
6246 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6247 enum rtx_code code, bool fetch_after,
6248 bool ignore, enum built_in_function ext_call)
6250 rtx val, mem, ret;
6251 enum memmodel model;
6252 tree fndecl;
6253 tree addr;
6255 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6257 /* Expand the operands. */
6258 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6259 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6261 /* Only try generating instructions if inlining is turned on. */
6262 if (flag_inline_atomics)
6264 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6265 if (ret)
6266 return ret;
6269 /* Return if a different routine isn't needed for the library call. */
6270 if (ext_call == BUILT_IN_NONE)
6271 return NULL_RTX;
6273 /* Change the call to the specified function. */
6274 fndecl = get_callee_fndecl (exp);
6275 addr = CALL_EXPR_FN (exp);
6276 STRIP_NOPS (addr);
6278 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6279 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6281 /* If we will emit code after the call, the call cannot be a tail call.
6282 If it is emitted as a tail call, a barrier is emitted after it, and
6283 then all trailing code is removed. */
6284 if (!ignore)
6285 CALL_EXPR_TAILCALL (exp) = 0;
6287 /* Expand the call here so we can emit trailing code. */
6288 ret = expand_call (exp, target, ignore);
6290 /* Replace the original function just in case it matters. */
6291 TREE_OPERAND (addr, 0) = fndecl;
6293 /* Then issue the arithmetic correction to return the right result. */
6294 if (!ignore)
6296 if (code == NOT)
6298 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6299 OPTAB_LIB_WIDEN);
6300 ret = expand_simple_unop (mode, NOT, ret, target, true);
6302 else
6303 ret = expand_simple_binop (mode, code, ret, val, target, true,
6304 OPTAB_LIB_WIDEN);
6306 return ret;
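/* For example (a sketch): if __atomic_add_fetch cannot be expanded
   inline, it is rewritten as a call to __atomic_fetch_add (EXT_CALL) and
   corrected afterwards:

     ret = __atomic_fetch_add (p, val, model);  // returns the old value
     ret = ret + val;                           // recover add-then-fetch

   For NAND the correction is ret = ~(ret & val), as coded above.  */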
6309 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6311 void
6312 expand_ifn_atomic_bit_test_and (gcall *call)
6314 tree ptr = gimple_call_arg (call, 0);
6315 tree bit = gimple_call_arg (call, 1);
6316 tree flag = gimple_call_arg (call, 2);
6317 tree lhs = gimple_call_lhs (call);
6318 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6319 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6320 enum rtx_code code;
6321 optab optab;
6322 struct expand_operand ops[5];
6324 gcc_assert (flag_inline_atomics);
6326 if (gimple_call_num_args (call) == 4)
6327 model = get_memmodel (gimple_call_arg (call, 3));
6329 rtx mem = get_builtin_sync_mem (ptr, mode);
6330 rtx val = expand_expr_force_mode (bit, mode);
6332 switch (gimple_call_internal_fn (call))
6334 case IFN_ATOMIC_BIT_TEST_AND_SET:
6335 code = IOR;
6336 optab = atomic_bit_test_and_set_optab;
6337 break;
6338 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6339 code = XOR;
6340 optab = atomic_bit_test_and_complement_optab;
6341 break;
6342 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6343 code = AND;
6344 optab = atomic_bit_test_and_reset_optab;
6345 break;
6346 default:
6347 gcc_unreachable ();
6350 if (lhs == NULL_TREE)
6352 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6353 val, NULL_RTX, true, OPTAB_DIRECT);
6354 if (code == AND)
6355 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6356 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6357 return;
6360 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6361 enum insn_code icode = direct_optab_handler (optab, mode);
6362 gcc_assert (icode != CODE_FOR_nothing);
6363 create_output_operand (&ops[0], target, mode);
6364 create_fixed_operand (&ops[1], mem);
6365 create_convert_operand_to (&ops[2], val, mode, true);
6366 create_integer_operand (&ops[3], model);
6367 create_integer_operand (&ops[4], integer_onep (flag));
6368 if (maybe_expand_insn (icode, 5, ops))
6369 return;
6371 rtx bitval = val;
6372 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6373 val, NULL_RTX, true, OPTAB_DIRECT);
6374 rtx maskval = val;
6375 if (code == AND)
6376 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6377 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6378 code, model, false);
6379 if (integer_onep (flag))
6381 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6382 NULL_RTX, true, OPTAB_DIRECT);
6383 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6384 true, OPTAB_DIRECT);
6386 else
6387 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6388 OPTAB_DIRECT);
6389 if (result != target)
6390 emit_move_insn (target, result);
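/* For illustration (a sketch): this internal function is created when an
   earlier GIMPLE pass recognizes patterns like

     bool b = __atomic_fetch_or (p, 1 << bit, model) & (1 << bit);

   and maps them to atomic_bit_test_and_set_optab and friends; the
   fallback path above recomputes the mask when no such insn exists.  */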
6393 /* Expand an atomic clear operation.
6394 void __atomic_clear (BOOL *obj, enum memmodel)
6395 EXP is the call expression. */
6397 static rtx
6398 expand_builtin_atomic_clear (tree exp)
6400 machine_mode mode;
6401 rtx mem, ret;
6402 enum memmodel model;
6404 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6405 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6406 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6408 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6410 source_location loc
6411 = expansion_point_location_if_in_system_header (input_location);
6412 warning_at (loc, OPT_Winvalid_memory_model,
6413 "invalid memory model for %<__atomic_store%>");
6414 model = MEMMODEL_SEQ_CST;
6417 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6418 Failing that, a store is issued by __atomic_store. The only way this can
6419 fail is if the bool type is larger than a word size. Unlikely, but
6420 handle it anyway for completeness. Assume a single threaded model since
6421 there is no atomic support in this case, and no barriers are required. */
6422 ret = expand_atomic_store (mem, const0_rtx, model, true);
6423 if (!ret)
6424 emit_move_insn (mem, const0_rtx);
6425 return const0_rtx;
6428 /* Expand an atomic test_and_set operation.
6429 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6430 EXP is the call expression. */
6432 static rtx
6433 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6435 rtx mem;
6436 enum memmodel model;
6437 machine_mode mode;
6439 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6440 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6441 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6443 return expand_atomic_test_and_set (target, mem, model);
6447 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6448 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6450 static tree
6451 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6453 int size;
6454 machine_mode mode;
6455 unsigned int mode_align, type_align;
6457 if (TREE_CODE (arg0) != INTEGER_CST)
6458 return NULL_TREE;
6460 /* We need a corresponding integer mode for the access to be lock-free. */
6461 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6462 if (!int_mode_for_size (size, 0).exists (&mode))
6463 return boolean_false_node;
6465 mode_align = GET_MODE_ALIGNMENT (mode);
6467 if (TREE_CODE (arg1) == INTEGER_CST)
6469 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6471 /* Either this argument is null, or it's a fake pointer encoding
6472 the alignment of the object. */
6473 val = least_bit_hwi (val);
6474 val *= BITS_PER_UNIT;
6476 if (val == 0 || mode_align < val)
6477 type_align = mode_align;
6478 else
6479 type_align = val;
6481 else
6483 tree ttype = TREE_TYPE (arg1);
6485 /* This function is usually invoked and folded immediately by the front
6486 end before anything else has a chance to look at it. The pointer
6487 parameter at this point is usually cast to a void *, so check for that
6488 and look past the cast. */
6489 if (CONVERT_EXPR_P (arg1)
6490 && POINTER_TYPE_P (ttype)
6491 && VOID_TYPE_P (TREE_TYPE (ttype))
6492 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6493 arg1 = TREE_OPERAND (arg1, 0);
6495 ttype = TREE_TYPE (arg1);
6496 gcc_assert (POINTER_TYPE_P (ttype));
6498 /* Get the underlying type of the object. */
6499 ttype = TREE_TYPE (ttype);
6500 type_align = TYPE_ALIGN (ttype);
6503 /* If the object has smaller alignment, the lock free routines cannot
6504 be used. */
6505 if (type_align < mode_align)
6506 return boolean_false_node;
6508 /* Check if a compare_and_swap pattern exists for the mode which represents
6509 the required size. The pattern is not allowed to fail, so the existence
6510 of the pattern indicates support is present. Also require that an
6511 atomic load exists for the required size. */
6512 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6513 return boolean_true_node;
6514 else
6515 return boolean_false_node;
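/* For example (a sketch): on a target with a 32-bit compare-and-swap insn
   and atomic load,

     __atomic_always_lock_free (4, 0)

   folds to true here, while a 4-byte object known to be only 1-byte
   aligned makes it fold to false via the type_align check above.  */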
6518 /* Return true if the parameters to call EXP represent an object which will
6519 always generate lock free instructions. The first argument represents the
6520 size of the object, and the second parameter is a pointer to the object
6521 itself. If NULL is passed for the object, then the result is based on
6522 typical alignment for an object of the specified size. Otherwise return
6523 false. */
6525 static rtx
6526 expand_builtin_atomic_always_lock_free (tree exp)
6528 tree size;
6529 tree arg0 = CALL_EXPR_ARG (exp, 0);
6530 tree arg1 = CALL_EXPR_ARG (exp, 1);
6532 if (TREE_CODE (arg0) != INTEGER_CST)
6534 error ("non-constant argument 1 to __atomic_always_lock_free");
6535 return const0_rtx;
6538 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6539 if (size == boolean_true_node)
6540 return const1_rtx;
6541 return const0_rtx;
6544 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6545 is lock free on this architecture. */
6547 static tree
6548 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6550 if (!flag_inline_atomics)
6551 return NULL_TREE;
6553 /* If it isn't always lock free, don't generate a result. */
6554 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6555 return boolean_true_node;
6557 return NULL_TREE;
6560 /* Return true if the parameters to call EXP represent an object which will
6561 always generate lock free instructions. The first argument represents the
6562 size of the object, and the second parameter is a pointer to the object
6563 itself. If NULL is passed for the object, then the result is based on
6564 typical alignment for an object of the specified size. Otherwise return
6565 NULL. */
6567 static rtx
6568 expand_builtin_atomic_is_lock_free (tree exp)
6570 tree size;
6571 tree arg0 = CALL_EXPR_ARG (exp, 0);
6572 tree arg1 = CALL_EXPR_ARG (exp, 1);
6574 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6576 error ("non-integer argument 1 to __atomic_is_lock_free");
6577 return NULL_RTX;
6580 if (!flag_inline_atomics)
6581 return NULL_RTX;
6583 /* If the value is known at compile time, return the RTX for it. */
6584 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6585 if (size == boolean_true_node)
6586 return const1_rtx;
6588 return NULL_RTX;
6591 /* Expand the __atomic_thread_fence intrinsic:
6592 void __atomic_thread_fence (enum memmodel)
6593 EXP is the CALL_EXPR. */
6595 static void
6596 expand_builtin_atomic_thread_fence (tree exp)
6598 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6599 expand_mem_thread_fence (model);
6602 /* Expand the __atomic_signal_fence intrinsic:
6603 void __atomic_signal_fence (enum memmodel)
6604 EXP is the CALL_EXPR. */
6606 static void
6607 expand_builtin_atomic_signal_fence (tree exp)
6609 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6610 expand_mem_signal_fence (model);
6613 /* Expand the __sync_synchronize intrinsic. */
6615 static void
6616 expand_builtin_sync_synchronize (void)
6618 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6621 static rtx
6622 expand_builtin_thread_pointer (tree exp, rtx target)
6624 enum insn_code icode;
6625 if (!validate_arglist (exp, VOID_TYPE))
6626 return const0_rtx;
6627 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6628 if (icode != CODE_FOR_nothing)
6630 struct expand_operand op;
6631 /* If the target is not suitable then create a new target. */
6632 if (target == NULL_RTX
6633 || !REG_P (target)
6634 || GET_MODE (target) != Pmode)
6635 target = gen_reg_rtx (Pmode);
6636 create_output_operand (&op, target, Pmode);
6637 expand_insn (icode, 1, &op);
6638 return target;
6640 error ("__builtin_thread_pointer is not supported on this target");
6641 return const0_rtx;
6644 static void
6645 expand_builtin_set_thread_pointer (tree exp)
6647 enum insn_code icode;
6648 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6649 return;
6650 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6651 if (icode != CODE_FOR_nothing)
6653 struct expand_operand op;
6654 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6655 Pmode, EXPAND_NORMAL);
6656 create_input_operand (&op, val, Pmode);
6657 expand_insn (icode, 1, &op);
6658 return;
6660 error ("__builtin_set_thread_pointer is not supported on this target");
6664 /* Emit code to restore the current value of stack. */
6666 static void
6667 expand_stack_restore (tree var)
6669 rtx_insn *prev;
6670 rtx sa = expand_normal (var);
6672 sa = convert_memory_address (Pmode, sa);
6674 prev = get_last_insn ();
6675 emit_stack_restore (SAVE_BLOCK, sa);
6677 record_new_stack_level ();
6679 fixup_args_size_notes (prev, get_last_insn (), 0);
6682 /* Emit code to save the current value of stack. */
6684 static rtx
6685 expand_stack_save (void)
6687 rtx ret = NULL_RTX;
6689 emit_stack_save (SAVE_BLOCK, &ret);
6690 return ret;
6693 /* Emit code to get the openacc gang, worker or vector id or size. */
6695 static rtx
6696 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6698 const char *name;
6699 rtx fallback_retval;
6700 rtx_insn *(*gen_fn) (rtx, rtx);
6701 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6703 case BUILT_IN_GOACC_PARLEVEL_ID:
6704 name = "__builtin_goacc_parlevel_id";
6705 fallback_retval = const0_rtx;
6706 gen_fn = targetm.gen_oacc_dim_pos;
6707 break;
6708 case BUILT_IN_GOACC_PARLEVEL_SIZE:
6709 name = "__builtin_goacc_parlevel_size";
6710 fallback_retval = const1_rtx;
6711 gen_fn = targetm.gen_oacc_dim_size;
6712 break;
6713 default:
6714 gcc_unreachable ();
6717 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6719 error ("%qs only supported in OpenACC code", name);
6720 return const0_rtx;
6723 tree arg = CALL_EXPR_ARG (exp, 0);
6724 if (TREE_CODE (arg) != INTEGER_CST)
6726 error ("non-constant argument 0 to %qs", name);
6727 return const0_rtx;
6730 int dim = TREE_INT_CST_LOW (arg);
6731 switch (dim)
6733 case GOMP_DIM_GANG:
6734 case GOMP_DIM_WORKER:
6735 case GOMP_DIM_VECTOR:
6736 break;
6737 default:
6738 error ("illegal argument 0 to %qs", name);
6739 return const0_rtx;
6742 if (ignore)
6743 return target;
6745 if (target == NULL_RTX)
6746 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6748 if (!targetm.have_oacc_dim_size ())
6750 emit_move_insn (target, fallback_retval);
6751 return target;
6754 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6755 emit_insn (gen_fn (reg, GEN_INT (dim)));
6756 if (reg != target)
6757 emit_move_insn (target, reg);
6759 return target;
6762 /* Expand a string compare operation using a sequence of char comparisons
6763 to get rid of the calling overhead, with result going to TARGET if
6764 that's convenient.
6766 VAR_STR is the variable string source;
6767 CONST_STR is the constant string source;
6768 LENGTH is the number of chars to compare;
6769 CONST_STR_N indicates which source string is the constant string;
6770 IS_MEMCMP indicates whether it's a memcmp or strcmp.
6772 The inline expansion looks like this (assume const_str_n is 2, i.e., arg2 is the constant string):
6774 target = (int) (unsigned char) var_str[0]
6775 - (int) (unsigned char) const_str[0];
6776 if (target != 0)
6777 goto ne_label;
6779 target = (int) (unsigned char) var_str[length - 2]
6780 - (int) (unsigned char) const_str[length - 2];
6781 if (target != 0)
6782 goto ne_label;
6783 target = (int) (unsigned char) var_str[length - 1]
6784 - (int) (unsigned char) const_str[length - 1];
6785 ne_label:
6788 static rtx
6789 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6790 unsigned HOST_WIDE_INT length,
6791 int const_str_n, machine_mode mode)
6793 HOST_WIDE_INT offset = 0;
6794 rtx var_rtx_array
6795 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
6796 rtx var_rtx = NULL_RTX;
6797 rtx const_rtx = NULL_RTX;
6798 rtx result = target ? target : gen_reg_rtx (mode);
6799 rtx_code_label *ne_label = gen_label_rtx ();
6800 tree unit_type_node = unsigned_char_type_node;
6801 scalar_int_mode unit_mode
6802 = as_a <scalar_int_mode> (TYPE_MODE (unit_type_node));
6804 start_sequence ();
6806 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6808 var_rtx
6809 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6810 const_rtx = c_readstr (const_str + offset, unit_mode);
6811 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6812 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6814 op0 = convert_modes (mode, unit_mode, op0, 1);
6815 op1 = convert_modes (mode, unit_mode, op1, 1);
6816 result = expand_simple_binop (mode, MINUS, op0, op1,
6817 result, 1, OPTAB_WIDEN);
6818 if (i < length - 1)
6819 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6820 mode, true, ne_label);
6821 offset += GET_MODE_SIZE (unit_mode);
6824 emit_label (ne_label);
6825 rtx_insn *insns = get_insns ();
6826 end_sequence ();
6827 emit_insn (insns);
6829 return result;
6832 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
6833 going to TARGET if that's convenient.
6834 If the call is not inlined, return NULL_RTX. */
6835 static rtx
6836 inline_expand_builtin_string_cmp (tree exp, rtx target)
6838 tree fndecl = get_callee_fndecl (exp);
6839 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6840 unsigned HOST_WIDE_INT length = 0;
6841 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6843 gcc_checking_assert (fcode == BUILT_IN_STRCMP
6844 || fcode == BUILT_IN_STRNCMP
6845 || fcode == BUILT_IN_MEMCMP);
6847 /* On a target where the type of the call (int) has the same or narrower
6848 precision than unsigned char, give up on the inline expansion. */
6849 if (TYPE_PRECISION (unsigned_char_type_node)
6850 >= TYPE_PRECISION (TREE_TYPE (exp)))
6851 return NULL_RTX;
6853 tree arg1 = CALL_EXPR_ARG (exp, 0);
6854 tree arg2 = CALL_EXPR_ARG (exp, 1);
6855 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6857 unsigned HOST_WIDE_INT len1 = 0;
6858 unsigned HOST_WIDE_INT len2 = 0;
6859 unsigned HOST_WIDE_INT len3 = 0;
6861 const char *src_str1 = c_getstr (arg1, &len1);
6862 const char *src_str2 = c_getstr (arg2, &len2);
6864 /* If neither string is a constant string, the call does not qualify. */
6865 if (!src_str1 && !src_str2)
6866 return NULL_RTX;
6868 /* For strncmp, if the length is not a constant, the call does not qualify. */
6869 if (is_ncmp && !tree_fits_uhwi_p (len3_tree))
6870 return NULL_RTX;
6872 int const_str_n = 0;
6873 if (!len1)
6874 const_str_n = 2;
6875 else if (!len2)
6876 const_str_n = 1;
6877 else if (len2 > len1)
6878 const_str_n = 1;
6879 else
6880 const_str_n = 2;
6882 gcc_checking_assert (const_str_n > 0);
6883 length = (const_str_n == 1) ? len1 : len2;
6885 if (is_ncmp && (len3 = tree_to_uhwi (len3_tree)) < length)
6886 length = len3;
6888 /* If the length of the comparison is larger than the threshold,
6889 do nothing. */
6890 if (length > (unsigned HOST_WIDE_INT)
6891 PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
6892 return NULL_RTX;
6894 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6896 /* Now, start inline expansion of the call. */
6897 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6898 (const_str_n == 1) ? src_str1 : src_str2, length,
6899 const_str_n, mode);
6902 /* Expand an expression EXP that calls a built-in function,
6903 with result going to TARGET if that's convenient
6904 (and in mode MODE if that's convenient).
6905 SUBTARGET may be used as the target for computing one of EXP's operands.
6906 IGNORE is nonzero if the value is to be ignored. */
6908 rtx
6909 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6910 int ignore)
6912 tree fndecl = get_callee_fndecl (exp);
6913 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6914 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6915 int flags;
6917 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6918 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6920 /* When ASan is enabled, we don't want to expand some memory/string
6921 builtins and rely on libsanitizer's hooks. This allows us to avoid
6922 redundant checks and be sure, that possible overflow will be detected
6923 by ASan. */
6925 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6926 return expand_call (exp, target, ignore);
6928 /* When not optimizing, generate calls to library functions for a certain
6929 set of builtins. */
6930 if (!optimize
6931 && !called_as_built_in (fndecl)
6932 && fcode != BUILT_IN_FORK
6933 && fcode != BUILT_IN_EXECL
6934 && fcode != BUILT_IN_EXECV
6935 && fcode != BUILT_IN_EXECLP
6936 && fcode != BUILT_IN_EXECLE
6937 && fcode != BUILT_IN_EXECVP
6938 && fcode != BUILT_IN_EXECVE
6939 && !ALLOCA_FUNCTION_CODE_P (fcode)
6940 && fcode != BUILT_IN_FREE)
6941 return expand_call (exp, target, ignore);
6943 /* The built-in function expanders test for target == const0_rtx
6944 to determine whether the function's result will be ignored. */
6945 if (ignore)
6946 target = const0_rtx;
6948 /* If the result of a pure or const built-in function is ignored, and
6949 none of its arguments are volatile, we can avoid expanding the
6950 built-in call and just evaluate the arguments for side-effects. */
6951 if (target == const0_rtx
6952 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6953 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6955 bool volatilep = false;
6956 tree arg;
6957 call_expr_arg_iterator iter;
6959 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6960 if (TREE_THIS_VOLATILE (arg))
6962 volatilep = true;
6963 break;
6966 if (! volatilep)
6968 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6969 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6970 return const0_rtx;
6974 switch (fcode)
6976 CASE_FLT_FN (BUILT_IN_FABS):
6977 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6978 case BUILT_IN_FABSD32:
6979 case BUILT_IN_FABSD64:
6980 case BUILT_IN_FABSD128:
6981 target = expand_builtin_fabs (exp, target, subtarget);
6982 if (target)
6983 return target;
6984 break;
6986 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6987 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6988 target = expand_builtin_copysign (exp, target, subtarget);
6989 if (target)
6990 return target;
6991 break;
6993 /* Just do a normal library call if we were unable to fold
6994 the values. */
6995 CASE_FLT_FN (BUILT_IN_CABS):
6996 break;
6998 CASE_FLT_FN (BUILT_IN_FMA):
6999 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7000 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7001 if (target)
7002 return target;
7003 break;
7005 CASE_FLT_FN (BUILT_IN_ILOGB):
7006 if (! flag_unsafe_math_optimizations)
7007 break;
7008 gcc_fallthrough ();
7009 CASE_FLT_FN (BUILT_IN_ISINF):
7010 CASE_FLT_FN (BUILT_IN_FINITE):
7011 case BUILT_IN_ISFINITE:
7012 case BUILT_IN_ISNORMAL:
7013 target = expand_builtin_interclass_mathfn (exp, target);
7014 if (target)
7015 return target;
7016 break;
7018 CASE_FLT_FN (BUILT_IN_ICEIL):
7019 CASE_FLT_FN (BUILT_IN_LCEIL):
7020 CASE_FLT_FN (BUILT_IN_LLCEIL):
7021 CASE_FLT_FN (BUILT_IN_LFLOOR):
7022 CASE_FLT_FN (BUILT_IN_IFLOOR):
7023 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7024 target = expand_builtin_int_roundingfn (exp, target);
7025 if (target)
7026 return target;
7027 break;
7029 CASE_FLT_FN (BUILT_IN_IRINT):
7030 CASE_FLT_FN (BUILT_IN_LRINT):
7031 CASE_FLT_FN (BUILT_IN_LLRINT):
7032 CASE_FLT_FN (BUILT_IN_IROUND):
7033 CASE_FLT_FN (BUILT_IN_LROUND):
7034 CASE_FLT_FN (BUILT_IN_LLROUND):
7035 target = expand_builtin_int_roundingfn_2 (exp, target);
7036 if (target)
7037 return target;
7038 break;
7040 CASE_FLT_FN (BUILT_IN_POWI):
7041 target = expand_builtin_powi (exp, target);
7042 if (target)
7043 return target;
7044 break;
7046 CASE_FLT_FN (BUILT_IN_CEXPI):
7047 target = expand_builtin_cexpi (exp, target);
7048 gcc_assert (target);
7049 return target;
7051 CASE_FLT_FN (BUILT_IN_SIN):
7052 CASE_FLT_FN (BUILT_IN_COS):
7053 if (! flag_unsafe_math_optimizations)
7054 break;
7055 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7056 if (target)
7057 return target;
7058 break;
7060 CASE_FLT_FN (BUILT_IN_SINCOS):
7061 if (! flag_unsafe_math_optimizations)
7062 break;
7063 target = expand_builtin_sincos (exp);
7064 if (target)
7065 return target;
7066 break;
7068 case BUILT_IN_APPLY_ARGS:
7069 return expand_builtin_apply_args ();
7071 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7072 FUNCTION with a copy of the parameters described by
7073 ARGUMENTS, and ARGSIZE. It returns a block of memory
7074 allocated on the stack into which is stored all the registers
7075 that might possibly be used for returning the result of a
7076 function. ARGUMENTS is the value returned by
7077 __builtin_apply_args. ARGSIZE is the number of bytes of
7078 arguments that must be copied. ??? How should this value be
7079 computed? We'll also need a safe worst case value for varargs
7080 functions. */
7081 case BUILT_IN_APPLY:
7082 if (!validate_arglist (exp, POINTER_TYPE,
7083 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7084 && !validate_arglist (exp, REFERENCE_TYPE,
7085 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7086 return const0_rtx;
7087 else
7089 rtx ops[3];
7091 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7092 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7093 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7095 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7098 /* __builtin_return (RESULT) causes the function to return the
7099 value described by RESULT. RESULT is address of the block of
7100 memory returned by __builtin_apply. */
7101 case BUILT_IN_RETURN:
7102 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7103 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7104 return const0_rtx;
7106 case BUILT_IN_SAVEREGS:
7107 return expand_builtin_saveregs ();
7109 case BUILT_IN_VA_ARG_PACK:
7110 /* All valid uses of __builtin_va_arg_pack () are removed during
7111 inlining. */
7112 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7113 return const0_rtx;
7115 case BUILT_IN_VA_ARG_PACK_LEN:
7116 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7117 inlining. */
7118 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7119 return const0_rtx;
7121 /* Return the address of the first anonymous stack arg. */
7122 case BUILT_IN_NEXT_ARG:
7123 if (fold_builtin_next_arg (exp, false))
7124 return const0_rtx;
7125 return expand_builtin_next_arg ();
7127 case BUILT_IN_CLEAR_CACHE:
7128 target = expand_builtin___clear_cache (exp);
7129 if (target)
7130 return target;
7131 break;
7133 case BUILT_IN_CLASSIFY_TYPE:
7134 return expand_builtin_classify_type (exp);
7136 case BUILT_IN_CONSTANT_P:
7137 return const0_rtx;
7139 case BUILT_IN_FRAME_ADDRESS:
7140 case BUILT_IN_RETURN_ADDRESS:
7141 return expand_builtin_frame_address (fndecl, exp);
7143 /* Returns the address of the area where the structure is returned.
7144 0 otherwise. */
7145 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7146 if (call_expr_nargs (exp) != 0
7147 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7148 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7149 return const0_rtx;
7150 else
7151 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7153 CASE_BUILT_IN_ALLOCA:
7154 target = expand_builtin_alloca (exp);
7155 if (target)
7156 return target;
7157 break;
7159 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7160 return expand_asan_emit_allocas_unpoison (exp);
7162 case BUILT_IN_STACK_SAVE:
7163 return expand_stack_save ();
7165 case BUILT_IN_STACK_RESTORE:
7166 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7167 return const0_rtx;
7169 case BUILT_IN_BSWAP16:
7170 case BUILT_IN_BSWAP32:
7171 case BUILT_IN_BSWAP64:
7172 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7173 if (target)
7174 return target;
7175 break;
7177 CASE_INT_FN (BUILT_IN_FFS):
7178 target = expand_builtin_unop (target_mode, exp, target,
7179 subtarget, ffs_optab);
7180 if (target)
7181 return target;
7182 break;
7184 CASE_INT_FN (BUILT_IN_CLZ):
7185 target = expand_builtin_unop (target_mode, exp, target,
7186 subtarget, clz_optab);
7187 if (target)
7188 return target;
7189 break;
7191 CASE_INT_FN (BUILT_IN_CTZ):
7192 target = expand_builtin_unop (target_mode, exp, target,
7193 subtarget, ctz_optab);
7194 if (target)
7195 return target;
7196 break;
7198 CASE_INT_FN (BUILT_IN_CLRSB):
7199 target = expand_builtin_unop (target_mode, exp, target,
7200 subtarget, clrsb_optab);
7201 if (target)
7202 return target;
7203 break;
7205 CASE_INT_FN (BUILT_IN_POPCOUNT):
7206 target = expand_builtin_unop (target_mode, exp, target,
7207 subtarget, popcount_optab);
7208 if (target)
7209 return target;
7210 break;
7212 CASE_INT_FN (BUILT_IN_PARITY):
7213 target = expand_builtin_unop (target_mode, exp, target,
7214 subtarget, parity_optab);
7215 if (target)
7216 return target;
7217 break;
7219 case BUILT_IN_STRLEN:
7220 target = expand_builtin_strlen (exp, target, target_mode);
7221 if (target)
7222 return target;
7223 break;
7225 case BUILT_IN_STRNLEN:
7226 target = expand_builtin_strnlen (exp, target, target_mode);
7227 if (target)
7228 return target;
7229 break;
7231 case BUILT_IN_STRCAT:
7232 target = expand_builtin_strcat (exp, target);
7233 if (target)
7234 return target;
7235 break;
7237 case BUILT_IN_STRCPY:
7238 target = expand_builtin_strcpy (exp, target);
7239 if (target)
7240 return target;
7241 break;
7243 case BUILT_IN_STRNCAT:
7244 target = expand_builtin_strncat (exp, target);
7245 if (target)
7246 return target;
7247 break;
7249 case BUILT_IN_STRNCPY:
7250 target = expand_builtin_strncpy (exp, target);
7251 if (target)
7252 return target;
7253 break;
7255 case BUILT_IN_STPCPY:
7256 target = expand_builtin_stpcpy (exp, target, mode);
7257 if (target)
7258 return target;
7259 break;
7261 case BUILT_IN_STPNCPY:
7262 target = expand_builtin_stpncpy (exp, target);
7263 if (target)
7264 return target;
7265 break;
7267 case BUILT_IN_MEMCHR:
7268 target = expand_builtin_memchr (exp, target);
7269 if (target)
7270 return target;
7271 break;
7273 case BUILT_IN_MEMCPY:
7274 target = expand_builtin_memcpy (exp, target);
7275 if (target)
7276 return target;
7277 break;
7279 case BUILT_IN_MEMMOVE:
7280 target = expand_builtin_memmove (exp, target);
7281 if (target)
7282 return target;
7283 break;
7285 case BUILT_IN_MEMPCPY:
7286 target = expand_builtin_mempcpy (exp, target);
7287 if (target)
7288 return target;
7289 break;
7291 case BUILT_IN_MEMSET:
7292 target = expand_builtin_memset (exp, target, mode);
7293 if (target)
7294 return target;
7295 break;
7297 case BUILT_IN_BZERO:
7298 target = expand_builtin_bzero (exp);
7299 if (target)
7300 return target;
7301 break;
7303 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7304 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7305 when changing it to a strcmp call. */
7306 case BUILT_IN_STRCMP_EQ:
7307 target = expand_builtin_memcmp (exp, target, true);
7308 if (target)
7309 return target;
7311 /* Change this call back to a BUILT_IN_STRCMP. */
7312 TREE_OPERAND (exp, 1)
7313 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7315 /* Delete the last parameter. */
7316 unsigned int i;
7317 vec<tree, va_gc> *arg_vec;
7318 vec_alloc (arg_vec, 2);
7319 for (i = 0; i < 2; i++)
7320 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7321 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7322 /* FALLTHROUGH */
7324 case BUILT_IN_STRCMP:
7325 target = expand_builtin_strcmp (exp, target);
7326 if (target)
7327 return target;
7328 break;
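/* Sketch of the BUILT_IN_STRCMP_EQ rewrite above: such calls are
   created internally when a strcmp result is only tested against
   zero, so roughly (with a hypothetical length bound of 8)

       strcmp_eq (a, b, 8)

   first tries the memcmp-equality expander; if that fails, the
   CALL_EXPR is rebuilt with only the first two arguments as

       strcmp (a, b)

   so the emitted libcall matches strcmp's two-argument prototype.  */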
7330 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7331 back to a BUILT_IN_STRNCMP. */
7332 case BUILT_IN_STRNCMP_EQ:
7333 target = expand_builtin_memcmp (exp, target, true);
7334 if (target)
7335 return target;
7337 /* Change it back to a BUILT_IN_STRNCMP. */
7338 TREE_OPERAND (exp, 1)
7339 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7340 /* FALLTHROUGH */
7342 case BUILT_IN_STRNCMP:
7343 target = expand_builtin_strncmp (exp, target, mode);
7344 if (target)
7345 return target;
7346 break;
7348 case BUILT_IN_BCMP:
7349 case BUILT_IN_MEMCMP:
7350 case BUILT_IN_MEMCMP_EQ:
7351 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7352 if (target)
7353 return target;
7354 if (fcode == BUILT_IN_MEMCMP_EQ)
7356 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7357 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7359 break;
7361 case BUILT_IN_SETJMP:
7362 /* This should have been lowered to the builtins below. */
7363 gcc_unreachable ();
7365 case BUILT_IN_SETJMP_SETUP:
7366 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7367 and the receiver label. */
7368 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7370 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7371 VOIDmode, EXPAND_NORMAL);
7372 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7373 rtx_insn *label_r = label_rtx (label);
7375 /* This is copied from the handling of non-local gotos. */
7376 expand_builtin_setjmp_setup (buf_addr, label_r);
7377 nonlocal_goto_handler_labels
7378 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7379 nonlocal_goto_handler_labels);
7380 /* ??? Do not let expand_label treat us as such since we would
7381 not want to be both on the list of non-local labels and on
7382 the list of forced labels. */
7383 FORCED_LABEL (label) = 0;
7384 return const0_rtx;
7386 break;
7388 case BUILT_IN_SETJMP_RECEIVER:
7389 /* __builtin_setjmp_receiver is passed the receiver label. */
7390 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7392 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7393 rtx_insn *label_r = label_rtx (label);
7395 expand_builtin_setjmp_receiver (label_r);
7396 return const0_rtx;
7398 break;
7400 /* __builtin_longjmp is passed a pointer to an array of five words.
7401 It's similar to the C library longjmp function but works with
7402 __builtin_setjmp above. */
7403 case BUILT_IN_LONGJMP:
7404 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7406 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7407 VOIDmode, EXPAND_NORMAL);
7408 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7410 if (value != const1_rtx)
7412 error ("%<__builtin_longjmp%> second argument must be 1");
7413 return const0_rtx;
7416 expand_builtin_longjmp (buf_addr, value);
7417 return const0_rtx;
7419 break;
7421 case BUILT_IN_NONLOCAL_GOTO:
7422 target = expand_builtin_nonlocal_goto (exp);
7423 if (target)
7424 return target;
7425 break;
7427 /* This updates the setjmp buffer that is its argument with the value
7428 of the current stack pointer. */
7429 case BUILT_IN_UPDATE_SETJMP_BUF:
7430 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7432 rtx buf_addr
7433 = expand_normal (CALL_EXPR_ARG (exp, 0));
7435 expand_builtin_update_setjmp_buf (buf_addr);
7436 return const0_rtx;
7438 break;
7440 case BUILT_IN_TRAP:
7441 expand_builtin_trap ();
7442 return const0_rtx;
7444 case BUILT_IN_UNREACHABLE:
7445 expand_builtin_unreachable ();
7446 return const0_rtx;
7448 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7449 case BUILT_IN_SIGNBITD32:
7450 case BUILT_IN_SIGNBITD64:
7451 case BUILT_IN_SIGNBITD128:
7452 target = expand_builtin_signbit (exp, target);
7453 if (target)
7454 return target;
7455 break;
7457 /* Various hooks for the DWARF 2 __throw routine. */
7458 case BUILT_IN_UNWIND_INIT:
7459 expand_builtin_unwind_init ();
7460 return const0_rtx;
7461 case BUILT_IN_DWARF_CFA:
7462 return virtual_cfa_rtx;
7463 #ifdef DWARF2_UNWIND_INFO
7464 case BUILT_IN_DWARF_SP_COLUMN:
7465 return expand_builtin_dwarf_sp_column ();
7466 case BUILT_IN_INIT_DWARF_REG_SIZES:
7467 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7468 return const0_rtx;
7469 #endif
7470 case BUILT_IN_FROB_RETURN_ADDR:
7471 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7472 case BUILT_IN_EXTRACT_RETURN_ADDR:
7473 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7474 case BUILT_IN_EH_RETURN:
7475 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7476 CALL_EXPR_ARG (exp, 1));
7477 return const0_rtx;
7478 case BUILT_IN_EH_RETURN_DATA_REGNO:
7479 return expand_builtin_eh_return_data_regno (exp);
7480 case BUILT_IN_EXTEND_POINTER:
7481 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7482 case BUILT_IN_EH_POINTER:
7483 return expand_builtin_eh_pointer (exp);
7484 case BUILT_IN_EH_FILTER:
7485 return expand_builtin_eh_filter (exp);
7486 case BUILT_IN_EH_COPY_VALUES:
7487 return expand_builtin_eh_copy_values (exp);
7489 case BUILT_IN_VA_START:
7490 return expand_builtin_va_start (exp);
7491 case BUILT_IN_VA_END:
7492 return expand_builtin_va_end (exp);
7493 case BUILT_IN_VA_COPY:
7494 return expand_builtin_va_copy (exp);
7495 case BUILT_IN_EXPECT:
7496 return expand_builtin_expect (exp, target);
7497 case BUILT_IN_ASSUME_ALIGNED:
7498 return expand_builtin_assume_aligned (exp, target);
7499 case BUILT_IN_PREFETCH:
7500 expand_builtin_prefetch (exp);
7501 return const0_rtx;
7503 case BUILT_IN_INIT_TRAMPOLINE:
7504 return expand_builtin_init_trampoline (exp, true);
7505 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7506 return expand_builtin_init_trampoline (exp, false);
7507 case BUILT_IN_ADJUST_TRAMPOLINE:
7508 return expand_builtin_adjust_trampoline (exp);
7510 case BUILT_IN_INIT_DESCRIPTOR:
7511 return expand_builtin_init_descriptor (exp);
7512 case BUILT_IN_ADJUST_DESCRIPTOR:
7513 return expand_builtin_adjust_descriptor (exp);
7515 case BUILT_IN_FORK:
7516 case BUILT_IN_EXECL:
7517 case BUILT_IN_EXECV:
7518 case BUILT_IN_EXECLP:
7519 case BUILT_IN_EXECLE:
7520 case BUILT_IN_EXECVP:
7521 case BUILT_IN_EXECVE:
7522 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7523 if (target)
7524 return target;
7525 break;
7527 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7528 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7529 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7530 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7531 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7533 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7534 if (target)
7535 return target;
7536 break;
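/* Note on the indexing pattern shared by the sync/atomic cases in
   this switch: the _1/_2/_4/_8/_16 function codes are consecutive,
   so

       fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1

   yields 0..4, which get_builtin_sync_mode maps to the integer mode
   of 1, 2, 4, 8 or 16 bytes (QImode through TImode).  */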
7538 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7539 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7540 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7541 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7542 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7544 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7545 if (target)
7546 return target;
7547 break;
7549 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7550 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7551 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7552 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7553 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7554 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7555 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7556 if (target)
7557 return target;
7558 break;
7560 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7561 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7562 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7563 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7564 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7565 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7566 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7567 if (target)
7568 return target;
7569 break;
7571 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7572 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7573 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7574 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7575 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7576 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7577 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7578 if (target)
7579 return target;
7580 break;
7582 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7583 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7584 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7585 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7586 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7587 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7588 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7589 if (target)
7590 return target;
7591 break;
7593 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7594 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7595 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7596 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7597 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7598 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7599 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7600 if (target)
7601 return target;
7602 break;
7604 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7605 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7606 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7607 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7608 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7610 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7611 if (target)
7612 return target;
7613 break;
7615 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7616 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7617 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7618 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7619 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7621 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7622 if (target)
7623 return target;
7624 break;
7626 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7627 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7628 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7629 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7630 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7631 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7632 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7633 if (target)
7634 return target;
7635 break;
7637 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7638 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7639 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7640 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7641 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7642 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7643 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7644 if (target)
7645 return target;
7646 break;
7648 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7649 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7650 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7651 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7652 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7653 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7654 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7655 if (target)
7656 return target;
7657 break;
7659 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7660 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7661 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7662 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7663 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7664 if (mode == VOIDmode)
7665 mode = TYPE_MODE (boolean_type_node);
7666 if (!target || !register_operand (target, mode))
7667 target = gen_reg_rtx (mode);
7669 mode = get_builtin_sync_mode
7670 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7671 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7672 if (target)
7673 return target;
7674 break;
7676 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7677 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7678 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7679 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7680 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7681 mode = get_builtin_sync_mode
7682 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7683 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7684 if (target)
7685 return target;
7686 break;
7688 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7689 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7690 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7691 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7692 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7693 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7694 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7695 if (target)
7696 return target;
7697 break;
7699 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7700 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7701 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7702 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7703 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7704 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7705 expand_builtin_sync_lock_release (mode, exp);
7706 return const0_rtx;
7708 case BUILT_IN_SYNC_SYNCHRONIZE:
7709 expand_builtin_sync_synchronize ();
7710 return const0_rtx;
7712 case BUILT_IN_ATOMIC_EXCHANGE_1:
7713 case BUILT_IN_ATOMIC_EXCHANGE_2:
7714 case BUILT_IN_ATOMIC_EXCHANGE_4:
7715 case BUILT_IN_ATOMIC_EXCHANGE_8:
7716 case BUILT_IN_ATOMIC_EXCHANGE_16:
7717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7718 target = expand_builtin_atomic_exchange (mode, exp, target);
7719 if (target)
7720 return target;
7721 break;
7723 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7724 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7725 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7726 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7727 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7729 unsigned int nargs, z;
7730 vec<tree, va_gc> *vec;
7732 mode =
7733 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7734 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7735 if (target)
7736 return target;
7738 /* If this is turned into an external library call, the weak parameter
7739 must be dropped to match the expected parameter list. */
7740 nargs = call_expr_nargs (exp);
7741 vec_alloc (vec, nargs - 1);
7742 for (z = 0; z < 3; z++)
7743 vec->quick_push (CALL_EXPR_ARG (exp, z));
7744 /* Skip the boolean weak parameter. */
7745 for (z = 4; z < 6; z++)
7746 vec->quick_push (CALL_EXPR_ARG (exp, z));
7747 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7748 break;
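/* Illustration of the weak-argument drop above, assuming the usual
   libatomic prototypes: the six-argument builtin call

       __atomic_compare_exchange_n (ptr, expected, desired,
                                    weak, success, failure)

   becomes the five-argument library call

       __atomic_compare_exchange_N (ptr, expected, desired,
                                    success, failure)

   i.e. argument 3, the boolean WEAK flag, is skipped.  */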
7751 case BUILT_IN_ATOMIC_LOAD_1:
7752 case BUILT_IN_ATOMIC_LOAD_2:
7753 case BUILT_IN_ATOMIC_LOAD_4:
7754 case BUILT_IN_ATOMIC_LOAD_8:
7755 case BUILT_IN_ATOMIC_LOAD_16:
7756 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7757 target = expand_builtin_atomic_load (mode, exp, target);
7758 if (target)
7759 return target;
7760 break;
7762 case BUILT_IN_ATOMIC_STORE_1:
7763 case BUILT_IN_ATOMIC_STORE_2:
7764 case BUILT_IN_ATOMIC_STORE_4:
7765 case BUILT_IN_ATOMIC_STORE_8:
7766 case BUILT_IN_ATOMIC_STORE_16:
7767 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7768 target = expand_builtin_atomic_store (mode, exp);
7769 if (target)
7770 return const0_rtx;
7771 break;
7773 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7774 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7775 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7776 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7777 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7779 enum built_in_function lib;
7780 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7781 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7782 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7783 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7784 ignore, lib);
7785 if (target)
7786 return target;
7787 break;
7789 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7790 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7791 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7792 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7793 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7795 enum built_in_function lib;
7796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7797 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7798 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7799 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7800 ignore, lib);
7801 if (target)
7802 return target;
7803 break;
7805 case BUILT_IN_ATOMIC_AND_FETCH_1:
7806 case BUILT_IN_ATOMIC_AND_FETCH_2:
7807 case BUILT_IN_ATOMIC_AND_FETCH_4:
7808 case BUILT_IN_ATOMIC_AND_FETCH_8:
7809 case BUILT_IN_ATOMIC_AND_FETCH_16:
7811 enum built_in_function lib;
7812 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7813 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7814 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7815 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7816 ignore, lib);
7817 if (target)
7818 return target;
7819 break;
7821 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7822 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7823 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7824 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7825 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7827 enum built_in_function lib;
7828 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7829 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7830 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7831 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7832 ignore, lib);
7833 if (target)
7834 return target;
7835 break;
7837 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7838 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7839 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7840 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7841 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7843 enum built_in_function lib;
7844 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7845 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7846 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7847 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7848 ignore, lib);
7849 if (target)
7850 return target;
7851 break;
7853 case BUILT_IN_ATOMIC_OR_FETCH_1:
7854 case BUILT_IN_ATOMIC_OR_FETCH_2:
7855 case BUILT_IN_ATOMIC_OR_FETCH_4:
7856 case BUILT_IN_ATOMIC_OR_FETCH_8:
7857 case BUILT_IN_ATOMIC_OR_FETCH_16:
7859 enum built_in_function lib;
7860 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7861 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7862 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7863 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7864 ignore, lib);
7865 if (target)
7866 return target;
7867 break;
7869 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7870 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7871 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7872 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7873 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7875 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7876 ignore, BUILT_IN_NONE);
7877 if (target)
7878 return target;
7879 break;
7881 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7882 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7883 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7884 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7885 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7887 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7888 ignore, BUILT_IN_NONE);
7889 if (target)
7890 return target;
7891 break;
7893 case BUILT_IN_ATOMIC_FETCH_AND_1:
7894 case BUILT_IN_ATOMIC_FETCH_AND_2:
7895 case BUILT_IN_ATOMIC_FETCH_AND_4:
7896 case BUILT_IN_ATOMIC_FETCH_AND_8:
7897 case BUILT_IN_ATOMIC_FETCH_AND_16:
7898 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7899 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7900 ignore, BUILT_IN_NONE);
7901 if (target)
7902 return target;
7903 break;
7905 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7906 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7907 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7908 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7909 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7910 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7911 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7912 ignore, BUILT_IN_NONE);
7913 if (target)
7914 return target;
7915 break;
7917 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7918 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7919 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7920 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7921 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7922 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7923 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7924 ignore, BUILT_IN_NONE);
7925 if (target)
7926 return target;
7927 break;
7929 case BUILT_IN_ATOMIC_FETCH_OR_1:
7930 case BUILT_IN_ATOMIC_FETCH_OR_2:
7931 case BUILT_IN_ATOMIC_FETCH_OR_4:
7932 case BUILT_IN_ATOMIC_FETCH_OR_8:
7933 case BUILT_IN_ATOMIC_FETCH_OR_16:
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7935 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7936 ignore, BUILT_IN_NONE);
7937 if (target)
7938 return target;
7939 break;
7941 case BUILT_IN_ATOMIC_TEST_AND_SET:
7942 return expand_builtin_atomic_test_and_set (exp, target);
7944 case BUILT_IN_ATOMIC_CLEAR:
7945 return expand_builtin_atomic_clear (exp);
7947 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7948 return expand_builtin_atomic_always_lock_free (exp);
7950 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7951 target = expand_builtin_atomic_is_lock_free (exp);
7952 if (target)
7953 return target;
7954 break;
7956 case BUILT_IN_ATOMIC_THREAD_FENCE:
7957 expand_builtin_atomic_thread_fence (exp);
7958 return const0_rtx;
7960 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7961 expand_builtin_atomic_signal_fence (exp);
7962 return const0_rtx;
7964 case BUILT_IN_OBJECT_SIZE:
7965 return expand_builtin_object_size (exp);
7967 case BUILT_IN_MEMCPY_CHK:
7968 case BUILT_IN_MEMPCPY_CHK:
7969 case BUILT_IN_MEMMOVE_CHK:
7970 case BUILT_IN_MEMSET_CHK:
7971 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7972 if (target)
7973 return target;
7974 break;
7976 case BUILT_IN_STRCPY_CHK:
7977 case BUILT_IN_STPCPY_CHK:
7978 case BUILT_IN_STRNCPY_CHK:
7979 case BUILT_IN_STPNCPY_CHK:
7980 case BUILT_IN_STRCAT_CHK:
7981 case BUILT_IN_STRNCAT_CHK:
7982 case BUILT_IN_SNPRINTF_CHK:
7983 case BUILT_IN_VSNPRINTF_CHK:
7984 maybe_emit_chk_warning (exp, fcode);
7985 break;
7987 case BUILT_IN_SPRINTF_CHK:
7988 case BUILT_IN_VSPRINTF_CHK:
7989 maybe_emit_sprintf_chk_warning (exp, fcode);
7990 break;
7992 case BUILT_IN_FREE:
7993 if (warn_free_nonheap_object)
7994 maybe_emit_free_warning (exp);
7995 break;
7997 case BUILT_IN_THREAD_POINTER:
7998 return expand_builtin_thread_pointer (exp, target);
8000 case BUILT_IN_SET_THREAD_POINTER:
8001 expand_builtin_set_thread_pointer (exp);
8002 return const0_rtx;
8004 case BUILT_IN_ACC_ON_DEVICE:
8005 /* Do a library call if we failed to expand the builtin when
8006 folding. */
8007 break;
8009 case BUILT_IN_GOACC_PARLEVEL_ID:
8010 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8011 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8013 default: /* Just do a library call for an unknown builtin. */
8014 break;
8017 /* The switch statement above can drop through to cause the function
8018 to be called normally. */
8019 return expand_call (exp, target, ignore);
8022 /* Determine whether a tree node represents a call to a built-in
8023 function. If the tree T is a call to a built-in function with
8024 the right number of arguments of the appropriate types, return
8025 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8026 Otherwise the return value is END_BUILTINS. */
8028 enum built_in_function
8029 builtin_mathfn_code (const_tree t)
8031 const_tree fndecl, arg, parmlist;
8032 const_tree argtype, parmtype;
8033 const_call_expr_arg_iterator iter;
8035 if (TREE_CODE (t) != CALL_EXPR)
8036 return END_BUILTINS;
8038 fndecl = get_callee_fndecl (t);
8039 if (fndecl == NULL_TREE
8040 || TREE_CODE (fndecl) != FUNCTION_DECL
8041 || ! DECL_BUILT_IN (fndecl)
8042 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8043 return END_BUILTINS;
8045 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8046 init_const_call_expr_arg_iterator (t, &iter);
8047 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8049 /* If a function doesn't take a variable number of arguments,
8050 the last element in the list will have type `void'. */
8051 parmtype = TREE_VALUE (parmlist);
8052 if (VOID_TYPE_P (parmtype))
8054 if (more_const_call_expr_args_p (&iter))
8055 return END_BUILTINS;
8056 return DECL_FUNCTION_CODE (fndecl);
8059 if (! more_const_call_expr_args_p (&iter))
8060 return END_BUILTINS;
8062 arg = next_const_call_expr_arg (&iter);
8063 argtype = TREE_TYPE (arg);
8065 if (SCALAR_FLOAT_TYPE_P (parmtype))
8067 if (! SCALAR_FLOAT_TYPE_P (argtype))
8068 return END_BUILTINS;
8070 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8072 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8073 return END_BUILTINS;
8075 else if (POINTER_TYPE_P (parmtype))
8077 if (! POINTER_TYPE_P (argtype))
8078 return END_BUILTINS;
8080 else if (INTEGRAL_TYPE_P (parmtype))
8082 if (! INTEGRAL_TYPE_P (argtype))
8083 return END_BUILTINS;
8085 else
8086 return END_BUILTINS;
8089 /* Variable-length argument list. */
8090 return DECL_FUNCTION_CODE (fndecl);
8093 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8094 evaluate to a constant. */
8096 static tree
8097 fold_builtin_constant_p (tree arg)
8099 /* We return 1 for a numeric type that's known to be a constant
8100 value at compile-time or for an aggregate type that's a
8101 literal constant. */
8102 STRIP_NOPS (arg);
8104 /* If we know this is a constant, return the constant 1. */
8105 if (CONSTANT_CLASS_P (arg)
8106 || (TREE_CODE (arg) == CONSTRUCTOR
8107 && TREE_CONSTANT (arg)))
8108 return integer_one_node;
8109 if (TREE_CODE (arg) == ADDR_EXPR)
8111 tree op = TREE_OPERAND (arg, 0);
8112 if (TREE_CODE (op) == STRING_CST
8113 || (TREE_CODE (op) == ARRAY_REF
8114 && integer_zerop (TREE_OPERAND (op, 1))
8115 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8116 return integer_one_node;
8119 /* If this expression has side effects, show we don't know it to be a
8120 constant. Likewise if it's a pointer or aggregate type, since in
8121 those cases we only want literals, as those are only optimized
8122 when generating RTL, not later.
8123 And finally, if we are compiling an initializer, not code, we
8124 need to return a definite result now; there's not going to be any
8125 more optimization done. */
8126 if (TREE_SIDE_EFFECTS (arg)
8127 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8128 || POINTER_TYPE_P (TREE_TYPE (arg))
8129 || cfun == 0
8130 || folding_initializer
8131 || force_folding_builtin_constant_p)
8132 return integer_zero_node;
8134 return NULL_TREE;
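/* A few illustrative outcomes of the folding above:

       __builtin_constant_p (42)     -> 1  (CONSTANT_CLASS_P)
       __builtin_constant_p ("abc")  -> 1  (address of a STRING_CST)
       __builtin_constant_p (x)      -> 0  in an initializer, where
                                           no further optimization
                                           will run

   and NULL_TREE otherwise, leaving the answer to later passes.  */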
8137 /* Create builtin_expect with PRED and EXPECTED as its arguments and
8138 return it as a truthvalue. */
8140 static tree
8141 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8142 tree predictor)
8144 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8146 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
8147 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8148 ret_type = TREE_TYPE (TREE_TYPE (fn));
8149 pred_type = TREE_VALUE (arg_types);
8150 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8152 pred = fold_convert_loc (loc, pred_type, pred);
8153 expected = fold_convert_loc (loc, expected_type, expected);
8154 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8155 predictor);
8157 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8158 build_int_cst (ret_type, 0));
8161 /* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
8162 NULL_TREE if no simplification is possible. */
8164 tree
8165 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
8167 tree inner, fndecl, inner_arg0;
8168 enum tree_code code;
8170 /* Distribute the expected value over short-circuiting operators.
8171 See through the cast from truthvalue_type_node to long. */
8172 inner_arg0 = arg0;
8173 while (CONVERT_EXPR_P (inner_arg0)
8174 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8175 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8176 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8178 /* If this is a builtin_expect within a builtin_expect, keep the
8179 inner one. See through a comparison against a constant. It
8180 might have been added to create a truthvalue. */
8181 inner = inner_arg0;
8183 if (COMPARISON_CLASS_P (inner)
8184 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8185 inner = TREE_OPERAND (inner, 0);
8187 if (TREE_CODE (inner) == CALL_EXPR
8188 && (fndecl = get_callee_fndecl (inner))
8189 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8190 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8191 return arg0;
8193 inner = inner_arg0;
8194 code = TREE_CODE (inner);
8195 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8197 tree op0 = TREE_OPERAND (inner, 0);
8198 tree op1 = TREE_OPERAND (inner, 1);
8199 arg1 = save_expr (arg1);
8201 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8202 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8203 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8205 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
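/* For example, the distribution above rewrites, in effect,

       __builtin_expect (a && b, 1)

   into

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each short-circuit arm carries its own prediction.  */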
8208 /* If the argument isn't invariant then there's nothing else we can do. */
8209 if (!TREE_CONSTANT (inner_arg0))
8210 return NULL_TREE;
8212 /* If we expect that a comparison against the argument will fold to
8213 a constant, return the constant. In practice, this means a true
8214 constant or the address of a non-weak symbol. */
8215 inner = inner_arg0;
8216 STRIP_NOPS (inner);
8217 if (TREE_CODE (inner) == ADDR_EXPR)
8218 {
8219 do
8220 {
8221 inner = TREE_OPERAND (inner, 0);
8222 }
8223 while (TREE_CODE (inner) == COMPONENT_REF
8224 || TREE_CODE (inner) == ARRAY_REF);
8225 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8226 return NULL_TREE;
8227 }
8229 /* Otherwise, ARG0 already has the proper type for the return value. */
8230 return arg0;
8233 /* Fold a call to __builtin_classify_type with argument ARG. */
8235 static tree
8236 fold_builtin_classify_type (tree arg)
8238 if (arg == 0)
8239 return build_int_cst (integer_type_node, no_type_class);
8241 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8244 /* Fold a call to __builtin_strlen with argument ARG. */
8246 static tree
8247 fold_builtin_strlen (location_t loc, tree type, tree arg)
8249 if (!validate_arg (arg, POINTER_TYPE))
8250 return NULL_TREE;
8251 else
8253 tree len = c_strlen (arg, 0);
8255 if (len)
8256 return fold_convert_loc (loc, type, len);
8258 return NULL_TREE;
8262 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8264 static tree
8265 fold_builtin_inf (location_t loc, tree type, int warn)
8267 REAL_VALUE_TYPE real;
8269 /* __builtin_inff is intended to be usable to define INFINITY on all
8270 targets. If an infinity is not available, INFINITY expands "to a
8271 positive constant of type float that overflows at translation
8272 time", footnote "In this case, using INFINITY will violate the
8273 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8274 Thus we pedwarn to ensure this constraint violation is
8275 diagnosed. */
8276 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8277 pedwarn (loc, 0, "target format does not support infinity");
8279 real_inf (&real);
8280 return build_real (type, real);
8283 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8284 NULL_TREE if no simplification can be made. */
8286 static tree
8287 fold_builtin_sincos (location_t loc,
8288 tree arg0, tree arg1, tree arg2)
8290 tree type;
8291 tree fndecl, call = NULL_TREE;
8293 if (!validate_arg (arg0, REAL_TYPE)
8294 || !validate_arg (arg1, POINTER_TYPE)
8295 || !validate_arg (arg2, POINTER_TYPE))
8296 return NULL_TREE;
8298 type = TREE_TYPE (arg0);
8300 /* Canonicalize sincos to cexpi. */
8301 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8302 if (fn == END_BUILTINS)
8303 return NULL_TREE;
8305 /* Calculate the result when the argument is a constant. */
8306 if (TREE_CODE (arg0) == REAL_CST)
8308 tree complex_type = build_complex_type (type);
8309 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8311 if (!call)
8313 if (!targetm.libc_has_function (function_c99_math_complex)
8314 || !builtin_decl_implicit_p (fn))
8315 return NULL_TREE;
8316 fndecl = builtin_decl_explicit (fn);
8317 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8318 call = builtin_save_expr (call);
8321 tree ptype = build_pointer_type (type);
8322 arg1 = fold_convert (ptype, arg1);
8323 arg2 = fold_convert (ptype, arg2);
8324 return build2 (COMPOUND_EXPR, void_type_node,
8325 build2 (MODIFY_EXPR, void_type_node,
8326 build_fold_indirect_ref_loc (loc, arg1),
8327 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8328 build2 (MODIFY_EXPR, void_type_node,
8329 build_fold_indirect_ref_loc (loc, arg2),
8330 fold_build1_loc (loc, REALPART_EXPR, type, call)));
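/* Sketch of the tree built above: for sincos (x, psin, pcos) the
   result behaves like

       __complex__ double t = cexpi (x);
       *psin = __imag__ t, *pcos = __real__ t;

   computing both components from a single cexpi call.  */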
8333 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8334 Return NULL_TREE if no simplification can be made. */
8336 static tree
8337 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8339 if (!validate_arg (arg1, POINTER_TYPE)
8340 || !validate_arg (arg2, POINTER_TYPE)
8341 || !validate_arg (len, INTEGER_TYPE))
8342 return NULL_TREE;
8344 /* If the LEN parameter is zero, return zero. */
8345 if (integer_zerop (len))
8346 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8347 arg1, arg2);
8349 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8350 if (operand_equal_p (arg1, arg2, 0))
8351 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8353 /* If the LEN parameter is one, return an expression corresponding to
8354 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8355 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8357 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8358 tree cst_uchar_ptr_node
8359 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8361 tree ind1
8362 = fold_convert_loc (loc, integer_type_node,
8363 build1 (INDIRECT_REF, cst_uchar_node,
8364 fold_convert_loc (loc,
8365 cst_uchar_ptr_node,
8366 arg1)));
8367 tree ind2
8368 = fold_convert_loc (loc, integer_type_node,
8369 build1 (INDIRECT_REF, cst_uchar_node,
8370 fold_convert_loc (loc,
8371 cst_uchar_ptr_node,
8372 arg2)));
8373 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8376 return NULL_TREE;
8379 /* Fold a call to builtin isascii with argument ARG. */
8381 static tree
8382 fold_builtin_isascii (location_t loc, tree arg)
8384 if (!validate_arg (arg, INTEGER_TYPE))
8385 return NULL_TREE;
8386 else
8388 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8389 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8390 build_int_cst (integer_type_node,
8391 ~ (unsigned HOST_WIDE_INT) 0x7f));
8392 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8393 arg, integer_zero_node);
8397 /* Fold a call to builtin toascii with argument ARG. */
8399 static tree
8400 fold_builtin_toascii (location_t loc, tree arg)
8402 if (!validate_arg (arg, INTEGER_TYPE))
8403 return NULL_TREE;
8405 /* Transform toascii(c) -> (c & 0x7f). */
8406 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8407 build_int_cst (integer_type_node, 0x7f));
8410 /* Fold a call to builtin isdigit with argument ARG. */
8412 static tree
8413 fold_builtin_isdigit (location_t loc, tree arg)
8415 if (!validate_arg (arg, INTEGER_TYPE))
8416 return NULL_TREE;
8417 else
8419 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8420 /* According to the C standard, isdigit is unaffected by locale.
8421 However, it definitely is affected by the target character set. */
8422 unsigned HOST_WIDE_INT target_digit0
8423 = lang_hooks.to_target_charset ('0');
8425 if (target_digit0 == 0)
8426 return NULL_TREE;
8428 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8429 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8430 build_int_cst (unsigned_type_node, target_digit0));
8431 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8432 build_int_cst (unsigned_type_node, 9));
8436 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8438 static tree
8439 fold_builtin_fabs (location_t loc, tree arg, tree type)
8441 if (!validate_arg (arg, REAL_TYPE))
8442 return NULL_TREE;
8444 arg = fold_convert_loc (loc, type, arg);
8445 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8448 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8450 static tree
8451 fold_builtin_abs (location_t loc, tree arg, tree type)
8453 if (!validate_arg (arg, INTEGER_TYPE))
8454 return NULL_TREE;
8456 arg = fold_convert_loc (loc, type, arg);
8457 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8460 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8462 static tree
8463 fold_builtin_carg (location_t loc, tree arg, tree type)
8465 if (validate_arg (arg, COMPLEX_TYPE)
8466 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8468 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8470 if (atan2_fn)
8472 tree new_arg = builtin_save_expr (arg);
8473 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8474 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8475 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8479 return NULL_TREE;
8482 /* Fold a call to builtin frexp; we can assume the base is 2. */
8484 static tree
8485 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8487 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8488 return NULL_TREE;
8490 STRIP_NOPS (arg0);
8492 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8493 return NULL_TREE;
8495 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8497 /* Proceed if a valid pointer type was passed in. */
8498 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8500 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8501 tree frac, exp;
8503 switch (value->cl)
8505 case rvc_zero:
8506 /* For +-0, return (*exp = 0, +-0). */
8507 exp = integer_zero_node;
8508 frac = arg0;
8509 break;
8510 case rvc_nan:
8511 case rvc_inf:
8512 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8513 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8514 case rvc_normal:
8516 /* Since the frexp function always expects base 2, and in
8517 GCC normalized significands are already in the range
8518 [0.5, 1.0), we have exactly what frexp wants. */
8519 REAL_VALUE_TYPE frac_rvt = *value;
8520 SET_REAL_EXP (&frac_rvt, 0);
8521 frac = build_real (rettype, frac_rvt);
8522 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8524 break;
8525 default:
8526 gcc_unreachable ();
8529 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
8530 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8531 TREE_SIDE_EFFECTS (arg1) = 1;
8532 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8535 return NULL_TREE;
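/* Constant-folding example for the above: frexp (8.0, &e) becomes
   the COMPOUND_EXPR

       (*e = 4, 0.5)

   since 8.0 == 0.5 * 2^4 and GCC keeps significands in [0.5, 1).  */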
8538 /* Fold a call to builtin modf. */
8540 static tree
8541 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8543 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8544 return NULL_TREE;
8546 STRIP_NOPS (arg0);
8548 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8549 return NULL_TREE;
8551 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8553 /* Proceed if a valid pointer type was passed in. */
8554 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8556 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8557 REAL_VALUE_TYPE trunc, frac;
8559 switch (value->cl)
8561 case rvc_nan:
8562 case rvc_zero:
8563 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8564 trunc = frac = *value;
8565 break;
8566 case rvc_inf:
8567 /* For +-Inf, return (*arg1 = arg0, +-0). */
8568 frac = dconst0;
8569 frac.sign = value->sign;
8570 trunc = *value;
8571 break;
8572 case rvc_normal:
8573 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8574 real_trunc (&trunc, VOIDmode, value);
8575 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8576 /* If the original number was negative and already
8577 integral, then the fractional part is -0.0. */
8578 if (value->sign && frac.cl == rvc_zero)
8579 frac.sign = value->sign;
8580 break;
8583 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8584 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8585 build_real (rettype, trunc));
8586 TREE_SIDE_EFFECTS (arg1) = 1;
8587 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8588 build_real (rettype, frac));
8591 return NULL_TREE;
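/* Constant-folding example for the above: modf (2.5, &ip) becomes

       (*ip = 2.0, 0.5)

   while modf (-2.0, &ip) yields (*ip = -2.0, -0.0), matching the
   sign handling in the rvc_normal case.  */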
8594 /* Given a location LOC, an interclass builtin function decl FNDECL
8595 and its single argument ARG, return a folded expression computing
8596 the same, or NULL_TREE if we either couldn't or didn't want to fold
8597 (the latter happens if there's an RTL instruction available). */
8599 static tree
8600 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8602 machine_mode mode;
8604 if (!validate_arg (arg, REAL_TYPE))
8605 return NULL_TREE;
8607 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8608 return NULL_TREE;
8610 mode = TYPE_MODE (TREE_TYPE (arg));
8612 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8614 /* If there is no optab, try generic code. */
8615 switch (DECL_FUNCTION_CODE (fndecl))
8617 tree result;
8619 CASE_FLT_FN (BUILT_IN_ISINF):
8621 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8622 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8623 tree type = TREE_TYPE (arg);
8624 REAL_VALUE_TYPE r;
8625 char buf[128];
8627 if (is_ibm_extended)
8629 /* NaN and Inf are encoded in the high-order double value
8630 only. The low-order value is not significant. */
8631 type = double_type_node;
8632 mode = DFmode;
8633 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8635 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8636 real_from_string (&r, buf);
8637 result = build_call_expr (isgr_fn, 2,
8638 fold_build1_loc (loc, ABS_EXPR, type, arg),
8639 build_real (type, r));
8640 return result;
8642 CASE_FLT_FN (BUILT_IN_FINITE):
8643 case BUILT_IN_ISFINITE:
8645 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8646 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8647 tree type = TREE_TYPE (arg);
8648 REAL_VALUE_TYPE r;
8649 char buf[128];
8651 if (is_ibm_extended)
8653 /* NaN and Inf are encoded in the high-order double value
8654 only. The low-order value is not significant. */
8655 type = double_type_node;
8656 mode = DFmode;
8657 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8659 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8660 real_from_string (&r, buf);
8661 result = build_call_expr (isle_fn, 2,
8662 fold_build1_loc (loc, ABS_EXPR, type, arg),
8663 build_real (type, r));
8664 /*result = fold_build2_loc (loc, UNGT_EXPR,
8665 TREE_TYPE (TREE_TYPE (fndecl)),
8666 fold_build1_loc (loc, ABS_EXPR, type, arg),
8667 build_real (type, r));
8668 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8669 TREE_TYPE (TREE_TYPE (fndecl)),
8670 result);*/
8671 return result;
8673 case BUILT_IN_ISNORMAL:
8675 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8676 islessequal(fabs(x),DBL_MAX). */
8677 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8678 tree type = TREE_TYPE (arg);
8679 tree orig_arg, max_exp, min_exp;
8680 machine_mode orig_mode = mode;
8681 REAL_VALUE_TYPE rmax, rmin;
8682 char buf[128];
8684 orig_arg = arg = builtin_save_expr (arg);
8685 if (is_ibm_extended)
8687 /* Use double to test the normal range of IBM extended
8688 precision. Emin for IBM extended precision is
8689 different from emin for IEEE double, being 53 higher
8690 since the low double exponent is at least 53 lower
8691 than the high double exponent. */
8692 type = double_type_node;
8693 mode = DFmode;
8694 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8696 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8698 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8699 real_from_string (&rmax, buf);
8700 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8701 real_from_string (&rmin, buf);
8702 max_exp = build_real (type, rmax);
8703 min_exp = build_real (type, rmin);
8705 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8706 if (is_ibm_extended)
8708 /* Testing the high end of the range is done just using
8709 the high double, using the same test as isfinite().
8710 For the subnormal end of the range we first test the
8711 high double, then if its magnitude is equal to the
8712 limit of 0x1p-969, we test whether the low double is
8713 non-zero and of opposite sign to the high double. */
8714 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8715 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8716 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8717 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8718 arg, min_exp);
8719 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8720 complex_double_type_node, orig_arg);
8721 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8722 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8723 tree zero = build_real (type, dconst0);
8724 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8725 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8726 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8727 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8728 fold_build3 (COND_EXPR,
8729 integer_type_node,
8730 hilt, logt, lolt));
8731 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8732 eq_min, ok_lo);
8733 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8734 gt_min, eq_min);
8736 else
8738 tree const isge_fn
8739 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8740 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8742 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8743 max_exp, min_exp);
8744 return result;
8746 default:
8747 break;
8750 return NULL_TREE;
8753 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8754 ARG is the argument for the call. */
8756 static tree
8757 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8759 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8761 if (!validate_arg (arg, REAL_TYPE))
8762 return NULL_TREE;
8764 switch (builtin_index)
8766 case BUILT_IN_ISINF:
8767 if (!HONOR_INFINITIES (arg))
8768 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8770 return NULL_TREE;
8772 case BUILT_IN_ISINF_SIGN:
8774 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8775 /* In a boolean context, GCC will fold the inner COND_EXPR to
8776 1. So e.g. "if (isinf_sign(x))" would be folded to just
8777 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8778 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8779 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8780 tree tmp = NULL_TREE;
8782 arg = builtin_save_expr (arg);
8784 if (signbit_fn && isinf_fn)
8786 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8787 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8789 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8790 signbit_call, integer_zero_node);
8791 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8792 isinf_call, integer_zero_node);
8794 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8795 integer_minus_one_node, integer_one_node);
8796 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8797 isinf_call, tmp,
8798 integer_zero_node);
8801 return tmp;
8804 case BUILT_IN_ISFINITE:
8805 if (!HONOR_NANS (arg)
8806 && !HONOR_INFINITIES (arg))
8807 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8809 return NULL_TREE;
8811 case BUILT_IN_ISNAN:
8812 if (!HONOR_NANS (arg))
8813 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8816 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8817 if (is_ibm_extended)
8819 /* NaN and Inf are encoded in the high-order double value
8820 only. The low-order value is not significant. */
8821 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8824 arg = builtin_save_expr (arg);
8825 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8827 default:
8828 gcc_unreachable ();
8832 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8833 This builtin will generate code to return the appropriate floating
8834 point classification depending on the value of the floating point
8835 number passed in. The possible return values must be supplied as
8836 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8837 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8838 one floating point argument, which is "type generic". */
8840 static tree
8841 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8843 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8844 arg, type, res, tmp;
8845 machine_mode mode;
8846 REAL_VALUE_TYPE r;
8847 char buf[128];
8849 /* Verify the required arguments in the original call. */
8850 if (nargs != 6
8851 || !validate_arg (args[0], INTEGER_TYPE)
8852 || !validate_arg (args[1], INTEGER_TYPE)
8853 || !validate_arg (args[2], INTEGER_TYPE)
8854 || !validate_arg (args[3], INTEGER_TYPE)
8855 || !validate_arg (args[4], INTEGER_TYPE)
8856 || !validate_arg (args[5], REAL_TYPE))
8857 return NULL_TREE;
8859 fp_nan = args[0];
8860 fp_infinite = args[1];
8861 fp_normal = args[2];
8862 fp_subnormal = args[3];
8863 fp_zero = args[4];
8864 arg = args[5];
8865 type = TREE_TYPE (arg);
8866 mode = TYPE_MODE (type);
8867 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8869 /* fpclassify(x) ->
8870 isnan(x) ? FP_NAN :
8871 (fabs(x) == Inf ? FP_INFINITE :
8872 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8873 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8875 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8876 build_real (type, dconst0));
8877 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8878 tmp, fp_zero, fp_subnormal);
8880 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8881 real_from_string (&r, buf);
8882 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8883 arg, build_real (type, r));
8884 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8886 if (HONOR_INFINITIES (mode))
8888 real_inf (&r);
8889 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8890 build_real (type, r));
8891 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8892 fp_infinite, res);
8895 if (HONOR_NANS (mode))
8897 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8898 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8901 return res;
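/* Illustrative sketch, not part of the original source: for a double
   argument on an IEEE target, the COND_EXPR chain built above behaves
   like

     int c = __builtin_isnan (x) ? FP_NAN
	     : fabs (x) == __builtin_inf () ? FP_INFINITE
	     : fabs (x) >= 0x1p-1022 ? FP_NORMAL
	     : x == 0.0 ? FP_ZERO : FP_SUBNORMAL;

   where 0x1p-1022 is DBL_MIN, produced by the "0x1p%d" string built
   from emin - 1 of the mode.  */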
8904 /* Fold a call to an unordered comparison function such as
8905 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8906 being called and ARG0 and ARG1 are the arguments for the call.
8907 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8908 the opposite of the desired result. UNORDERED_CODE is used
8909 for modes that can hold NaNs and ORDERED_CODE is used for
8910 the rest. */
8912 static tree
8913 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8914 enum tree_code unordered_code,
8915 enum tree_code ordered_code)
8917 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8918 enum tree_code code;
8919 tree type0, type1;
8920 enum tree_code code0, code1;
8921 tree cmp_type = NULL_TREE;
8923 type0 = TREE_TYPE (arg0);
8924 type1 = TREE_TYPE (arg1);
8926 code0 = TREE_CODE (type0);
8927 code1 = TREE_CODE (type1);
8929 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8930 /* Choose the wider of two real types. */
8931 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8932 ? type0 : type1;
8933 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8934 cmp_type = type0;
8935 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8936 cmp_type = type1;
8938 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8939 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8941 if (unordered_code == UNORDERED_EXPR)
8943 if (!HONOR_NANS (arg0))
8944 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8945 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8948 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8949 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8950 fold_build2_loc (loc, code, type, arg0, arg1));
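/* Illustrative sketch, not part of the original source: with NaNs
   honored, __builtin_isgreater (x, y) arrives here with UNLE_EXPR and
   LE_EXPR and folds to the equivalent of !(x UNLE y), which is false
   whenever either operand is a NaN; with -ffinite-math-only it folds
   to the plain !(x <= y).  */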
8953 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8954 arithmetic if it can never overflow, or into internal functions that
8955 return both the result of the arithmetic and an overflow flag in
8956 a complex integer result, or some other check for overflow.
8957 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8958 checking part of that. */
8960 static tree
8961 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8962 tree arg0, tree arg1, tree arg2)
8964 enum internal_fn ifn = IFN_LAST;
8965 /* The code of the expression corresponding to the type-generic
8966 built-in, or ERROR_MARK for the type-specific ones. */
8967 enum tree_code opcode = ERROR_MARK;
8968 bool ovf_only = false;
8970 switch (fcode)
8972 case BUILT_IN_ADD_OVERFLOW_P:
8973 ovf_only = true;
8974 /* FALLTHRU */
8975 case BUILT_IN_ADD_OVERFLOW:
8976 opcode = PLUS_EXPR;
8977 /* FALLTHRU */
8978 case BUILT_IN_SADD_OVERFLOW:
8979 case BUILT_IN_SADDL_OVERFLOW:
8980 case BUILT_IN_SADDLL_OVERFLOW:
8981 case BUILT_IN_UADD_OVERFLOW:
8982 case BUILT_IN_UADDL_OVERFLOW:
8983 case BUILT_IN_UADDLL_OVERFLOW:
8984 ifn = IFN_ADD_OVERFLOW;
8985 break;
8986 case BUILT_IN_SUB_OVERFLOW_P:
8987 ovf_only = true;
8988 /* FALLTHRU */
8989 case BUILT_IN_SUB_OVERFLOW:
8990 opcode = MINUS_EXPR;
8991 /* FALLTHRU */
8992 case BUILT_IN_SSUB_OVERFLOW:
8993 case BUILT_IN_SSUBL_OVERFLOW:
8994 case BUILT_IN_SSUBLL_OVERFLOW:
8995 case BUILT_IN_USUB_OVERFLOW:
8996 case BUILT_IN_USUBL_OVERFLOW:
8997 case BUILT_IN_USUBLL_OVERFLOW:
8998 ifn = IFN_SUB_OVERFLOW;
8999 break;
9000 case BUILT_IN_MUL_OVERFLOW_P:
9001 ovf_only = true;
9002 /* FALLTHRU */
9003 case BUILT_IN_MUL_OVERFLOW:
9004 opcode = MULT_EXPR;
9005 /* FALLTHRU */
9006 case BUILT_IN_SMUL_OVERFLOW:
9007 case BUILT_IN_SMULL_OVERFLOW:
9008 case BUILT_IN_SMULLL_OVERFLOW:
9009 case BUILT_IN_UMUL_OVERFLOW:
9010 case BUILT_IN_UMULL_OVERFLOW:
9011 case BUILT_IN_UMULLL_OVERFLOW:
9012 ifn = IFN_MUL_OVERFLOW;
9013 break;
9014 default:
9015 gcc_unreachable ();
9018 /* For the "generic" overloads, the first two arguments can have different
9019 types and the last argument determines the target type to use to check
9020 for overflow. The arguments of the other overloads all have the same
9021 type. */
9022 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9024 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9025 arguments are constant, attempt to fold the built-in call into a constant
9026 expression indicating whether or not it detected an overflow. */
9027 if (ovf_only
9028 && TREE_CODE (arg0) == INTEGER_CST
9029 && TREE_CODE (arg1) == INTEGER_CST)
9030 /* Perform the computation in the target type and check for overflow. */
9031 return omit_one_operand_loc (loc, boolean_type_node,
9032 arith_overflowed_p (opcode, type, arg0, arg1)
9033 ? boolean_true_node : boolean_false_node,
9034 arg2);
9036 tree ctype = build_complex_type (type);
9037 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9038 2, arg0, arg1);
9039 tree tgt = save_expr (call);
9040 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9041 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9042 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9044 if (ovf_only)
9045 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9047 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9048 tree store
9049 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9050 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
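/* Illustrative sketch, not part of the original source: a call such as

     bool ovf = __builtin_add_overflow (a, b, &res);

   is folded into the equivalent of

     c = .ADD_OVERFLOW (a, b);   -- complex integer typed internal call
     res = REALPART_EXPR <c>;
     ovf = (bool) IMAGPART_EXPR <c>;

   while __builtin_add_overflow_p keeps only the converted
   IMAGPART_EXPR and discards the arithmetic result.  */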
9053 /* Fold a call to __builtin_FILE to a constant string. */
9055 static inline tree
9056 fold_builtin_FILE (location_t loc)
9058 if (const char *fname = LOCATION_FILE (loc))
9060 /* The documentation says this builtin is equivalent to the preprocessor
9061 __FILE__ macro so it appears appropriate to use the same file prefix
9062 mappings. */
9063 fname = remap_macro_filename (fname);
9064 return build_string_literal (strlen (fname) + 1, fname);
9067 return build_string_literal (1, "");
9070 /* Fold a call to __builtin_FUNCTION to a constant string. */
9072 static inline tree
9073 fold_builtin_FUNCTION ()
9075 const char *name = "";
9077 if (current_function_decl)
9078 name = lang_hooks.decl_printable_name (current_function_decl, 0);
9080 return build_string_literal (strlen (name) + 1, name);
9083 /* Fold a call to __builtin_LINE to an integer constant. */
9085 static inline tree
9086 fold_builtin_LINE (location_t loc, tree type)
9088 return build_int_cst (type, LOCATION_LINE (loc));
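/* Illustrative sketch, not part of the original source: the three
   folders above turn

     const char *file = __builtin_FILE ();
     const char *func = __builtin_FUNCTION ();
     int line = __builtin_LINE ();

   into a string literal for the (prefix-remapped) file name, a string
   literal with the printable name of the enclosing function, and an
   integer constant for the call's line, all resolved at the location
   of the call itself.  */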
9091 /* Fold a call to built-in function FNDECL with 0 arguments.
9092 This function returns NULL_TREE if no simplification was possible. */
9094 static tree
9095 fold_builtin_0 (location_t loc, tree fndecl)
9097 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9098 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9099 switch (fcode)
9101 case BUILT_IN_FILE:
9102 return fold_builtin_FILE (loc);
9104 case BUILT_IN_FUNCTION:
9105 return fold_builtin_FUNCTION ();
9107 case BUILT_IN_LINE:
9108 return fold_builtin_LINE (loc, type);
9110 CASE_FLT_FN (BUILT_IN_INF):
9111 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9112 case BUILT_IN_INFD32:
9113 case BUILT_IN_INFD64:
9114 case BUILT_IN_INFD128:
9115 return fold_builtin_inf (loc, type, true);
9117 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9118 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9119 return fold_builtin_inf (loc, type, false);
9121 case BUILT_IN_CLASSIFY_TYPE:
9122 return fold_builtin_classify_type (NULL_TREE);
9124 default:
9125 break;
9127 return NULL_TREE;
9130 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9131 This function returns NULL_TREE if no simplification was possible. */
9133 static tree
9134 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9136 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9137 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9139 if (TREE_CODE (arg0) == ERROR_MARK)
9140 return NULL_TREE;
9142 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9143 return ret;
9145 switch (fcode)
9147 case BUILT_IN_CONSTANT_P:
9149 tree val = fold_builtin_constant_p (arg0);
9151 /* Gimplification will pull the CALL_EXPR for the builtin out of
9152 an if condition. When not optimizing, we'll not CSE it back.
9153 To avoid regressions such as link errors, return false now. */
9154 if (!val && !optimize)
9155 val = integer_zero_node;
9157 return val;
9160 case BUILT_IN_CLASSIFY_TYPE:
9161 return fold_builtin_classify_type (arg0);
9163 case BUILT_IN_STRLEN:
9164 return fold_builtin_strlen (loc, type, arg0);
9166 CASE_FLT_FN (BUILT_IN_FABS):
9167 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9168 case BUILT_IN_FABSD32:
9169 case BUILT_IN_FABSD64:
9170 case BUILT_IN_FABSD128:
9171 return fold_builtin_fabs (loc, arg0, type);
9173 case BUILT_IN_ABS:
9174 case BUILT_IN_LABS:
9175 case BUILT_IN_LLABS:
9176 case BUILT_IN_IMAXABS:
9177 return fold_builtin_abs (loc, arg0, type);
9179 CASE_FLT_FN (BUILT_IN_CONJ):
9180 if (validate_arg (arg0, COMPLEX_TYPE)
9181 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9182 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9183 break;
9185 CASE_FLT_FN (BUILT_IN_CREAL):
9186 if (validate_arg (arg0, COMPLEX_TYPE)
9187 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9188 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9189 break;
9191 CASE_FLT_FN (BUILT_IN_CIMAG):
9192 if (validate_arg (arg0, COMPLEX_TYPE)
9193 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9194 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9195 break;
9197 CASE_FLT_FN (BUILT_IN_CARG):
9198 return fold_builtin_carg (loc, arg0, type);
9200 case BUILT_IN_ISASCII:
9201 return fold_builtin_isascii (loc, arg0);
9203 case BUILT_IN_TOASCII:
9204 return fold_builtin_toascii (loc, arg0);
9206 case BUILT_IN_ISDIGIT:
9207 return fold_builtin_isdigit (loc, arg0);
9209 CASE_FLT_FN (BUILT_IN_FINITE):
9210 case BUILT_IN_FINITED32:
9211 case BUILT_IN_FINITED64:
9212 case BUILT_IN_FINITED128:
9213 case BUILT_IN_ISFINITE:
9215 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9216 if (ret)
9217 return ret;
9218 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9221 CASE_FLT_FN (BUILT_IN_ISINF):
9222 case BUILT_IN_ISINFD32:
9223 case BUILT_IN_ISINFD64:
9224 case BUILT_IN_ISINFD128:
9226 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9227 if (ret)
9228 return ret;
9229 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9232 case BUILT_IN_ISNORMAL:
9233 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9235 case BUILT_IN_ISINF_SIGN:
9236 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9238 CASE_FLT_FN (BUILT_IN_ISNAN):
9239 case BUILT_IN_ISNAND32:
9240 case BUILT_IN_ISNAND64:
9241 case BUILT_IN_ISNAND128:
9242 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9244 case BUILT_IN_FREE:
9245 if (integer_zerop (arg0))
9246 return build_empty_stmt (loc);
9247 break;
9249 default:
9250 break;
9253 return NULL_TREE;
9257 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9258 This function returns NULL_TREE if no simplification was possible. */
9260 static tree
9261 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9263 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9264 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9266 if (TREE_CODE (arg0) == ERROR_MARK
9267 || TREE_CODE (arg1) == ERROR_MARK)
9268 return NULL_TREE;
9270 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9271 return ret;
9273 switch (fcode)
9275 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9276 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9277 if (validate_arg (arg0, REAL_TYPE)
9278 && validate_arg (arg1, POINTER_TYPE))
9279 return do_mpfr_lgamma_r (arg0, arg1, type);
9280 break;
9282 CASE_FLT_FN (BUILT_IN_FREXP):
9283 return fold_builtin_frexp (loc, arg0, arg1, type);
9285 CASE_FLT_FN (BUILT_IN_MODF):
9286 return fold_builtin_modf (loc, arg0, arg1, type);
9288 case BUILT_IN_STRSPN:
9289 return fold_builtin_strspn (loc, arg0, arg1);
9291 case BUILT_IN_STRCSPN:
9292 return fold_builtin_strcspn (loc, arg0, arg1);
9294 case BUILT_IN_STRPBRK:
9295 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9297 case BUILT_IN_EXPECT:
9298 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9300 case BUILT_IN_ISGREATER:
9301 return fold_builtin_unordered_cmp (loc, fndecl,
9302 arg0, arg1, UNLE_EXPR, LE_EXPR);
9303 case BUILT_IN_ISGREATEREQUAL:
9304 return fold_builtin_unordered_cmp (loc, fndecl,
9305 arg0, arg1, UNLT_EXPR, LT_EXPR);
9306 case BUILT_IN_ISLESS:
9307 return fold_builtin_unordered_cmp (loc, fndecl,
9308 arg0, arg1, UNGE_EXPR, GE_EXPR);
9309 case BUILT_IN_ISLESSEQUAL:
9310 return fold_builtin_unordered_cmp (loc, fndecl,
9311 arg0, arg1, UNGT_EXPR, GT_EXPR);
9312 case BUILT_IN_ISLESSGREATER:
9313 return fold_builtin_unordered_cmp (loc, fndecl,
9314 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9315 case BUILT_IN_ISUNORDERED:
9316 return fold_builtin_unordered_cmp (loc, fndecl,
9317 arg0, arg1, UNORDERED_EXPR,
9318 NOP_EXPR);
9320 /* We do the folding for va_start in the expander. */
9321 case BUILT_IN_VA_START:
9322 break;
9324 case BUILT_IN_OBJECT_SIZE:
9325 return fold_builtin_object_size (arg0, arg1);
9327 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9328 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9330 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9331 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9333 default:
9334 break;
9336 return NULL_TREE;
9339 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9340 and ARG2.
9341 This function returns NULL_TREE if no simplification was possible. */
9343 static tree
9344 fold_builtin_3 (location_t loc, tree fndecl,
9345 tree arg0, tree arg1, tree arg2)
9347 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9348 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9350 if (TREE_CODE (arg0) == ERROR_MARK
9351 || TREE_CODE (arg1) == ERROR_MARK
9352 || TREE_CODE (arg2) == ERROR_MARK)
9353 return NULL_TREE;
9355 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9356 arg0, arg1, arg2))
9357 return ret;
9359 switch (fcode)
9362 CASE_FLT_FN (BUILT_IN_SINCOS):
9363 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9365 CASE_FLT_FN (BUILT_IN_REMQUO):
9366 if (validate_arg (arg0, REAL_TYPE)
9367 && validate_arg (arg1, REAL_TYPE)
9368 && validate_arg (arg2, POINTER_TYPE))
9369 return do_mpfr_remquo (arg0, arg1, arg2);
9370 break;
9372 case BUILT_IN_MEMCMP:
9373 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9375 case BUILT_IN_EXPECT:
9376 return fold_builtin_expect (loc, arg0, arg1, arg2);
9378 case BUILT_IN_ADD_OVERFLOW:
9379 case BUILT_IN_SUB_OVERFLOW:
9380 case BUILT_IN_MUL_OVERFLOW:
9381 case BUILT_IN_ADD_OVERFLOW_P:
9382 case BUILT_IN_SUB_OVERFLOW_P:
9383 case BUILT_IN_MUL_OVERFLOW_P:
9384 case BUILT_IN_SADD_OVERFLOW:
9385 case BUILT_IN_SADDL_OVERFLOW:
9386 case BUILT_IN_SADDLL_OVERFLOW:
9387 case BUILT_IN_SSUB_OVERFLOW:
9388 case BUILT_IN_SSUBL_OVERFLOW:
9389 case BUILT_IN_SSUBLL_OVERFLOW:
9390 case BUILT_IN_SMUL_OVERFLOW:
9391 case BUILT_IN_SMULL_OVERFLOW:
9392 case BUILT_IN_SMULLL_OVERFLOW:
9393 case BUILT_IN_UADD_OVERFLOW:
9394 case BUILT_IN_UADDL_OVERFLOW:
9395 case BUILT_IN_UADDLL_OVERFLOW:
9396 case BUILT_IN_USUB_OVERFLOW:
9397 case BUILT_IN_USUBL_OVERFLOW:
9398 case BUILT_IN_USUBLL_OVERFLOW:
9399 case BUILT_IN_UMUL_OVERFLOW:
9400 case BUILT_IN_UMULL_OVERFLOW:
9401 case BUILT_IN_UMULLL_OVERFLOW:
9402 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9404 default:
9405 break;
9407 return NULL_TREE;
9410 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9411 arguments. IGNORE is true if the result of the
9412 function call is ignored. This function returns NULL_TREE if no
9413 simplification was possible. */
9415 tree
9416 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9418 tree ret = NULL_TREE;
9420 switch (nargs)
9422 case 0:
9423 ret = fold_builtin_0 (loc, fndecl);
9424 break;
9425 case 1:
9426 ret = fold_builtin_1 (loc, fndecl, args[0]);
9427 break;
9428 case 2:
9429 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9430 break;
9431 case 3:
9432 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9433 break;
9434 default:
9435 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9436 break;
9438 if (ret)
9440 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9441 SET_EXPR_LOCATION (ret, loc);
9442 return ret;
9444 return NULL_TREE;
9447 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9448 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9449 of arguments in ARGS to be omitted. OLDNARGS is the number of
9450 elements in ARGS. */
9452 static tree
9453 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9454 int skip, tree fndecl, int n, va_list newargs)
9456 int nargs = oldnargs - skip + n;
9457 tree *buffer;
9459 if (n > 0)
9461 int i, j;
9463 buffer = XALLOCAVEC (tree, nargs);
9464 for (i = 0; i < n; i++)
9465 buffer[i] = va_arg (newargs, tree);
9466 for (j = skip; j < oldnargs; j++, i++)
9467 buffer[i] = args[j];
9469 else
9470 buffer = args + skip;
9472 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9475 /* Return true if FNDECL shouldn't be folded right now.
9476 If a built-in function has an inline attribute always_inline
9477 wrapper, defer folding it until after always_inline functions have
9478 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9479 might not be performed. */
9481 bool
9482 avoid_folding_inline_builtin (tree fndecl)
9484 return (DECL_DECLARED_INLINE_P (fndecl)
9485 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9486 && cfun
9487 && !cfun->always_inline_functions_inlined
9488 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9491 /* A wrapper function for builtin folding that prevents warnings for
9492 "statement without effect" and the like, caused by removing the
9493 call node earlier than the warning is generated. */
9495 tree
9496 fold_call_expr (location_t loc, tree exp, bool ignore)
9498 tree ret = NULL_TREE;
9499 tree fndecl = get_callee_fndecl (exp);
9500 if (fndecl
9501 && TREE_CODE (fndecl) == FUNCTION_DECL
9502 && DECL_BUILT_IN (fndecl)
9503 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9504 yet. Defer folding until we see all the arguments
9505 (after inlining). */
9506 && !CALL_EXPR_VA_ARG_PACK (exp))
9508 int nargs = call_expr_nargs (exp);
9510 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9511 instead last argument is __builtin_va_arg_pack (). Defer folding
9512 even in that case, until arguments are finalized. */
9513 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9515 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9516 if (fndecl2
9517 && TREE_CODE (fndecl2) == FUNCTION_DECL
9518 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9519 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9520 return NULL_TREE;
9523 if (avoid_folding_inline_builtin (fndecl))
9524 return NULL_TREE;
9526 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9527 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9528 CALL_EXPR_ARGP (exp), ignore);
9529 else
9531 tree *args = CALL_EXPR_ARGP (exp);
9532 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9533 if (ret)
9534 return ret;
9537 return NULL_TREE;
9540 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9541 N arguments are passed in the array ARGARRAY. Return a folded
9542 expression or NULL_TREE if no simplification was possible. */
9544 tree
9545 fold_builtin_call_array (location_t loc, tree,
9546 tree fn,
9547 int n,
9548 tree *argarray)
9550 if (TREE_CODE (fn) != ADDR_EXPR)
9551 return NULL_TREE;
9553 tree fndecl = TREE_OPERAND (fn, 0);
9554 if (TREE_CODE (fndecl) == FUNCTION_DECL
9555 && DECL_BUILT_IN (fndecl))
9557 /* If last argument is __builtin_va_arg_pack (), arguments to this
9558 function are not finalized yet. Defer folding until they are. */
9559 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9561 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9562 if (fndecl2
9563 && TREE_CODE (fndecl2) == FUNCTION_DECL
9564 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9565 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9566 return NULL_TREE;
9568 if (avoid_folding_inline_builtin (fndecl))
9569 return NULL_TREE;
9570 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9571 return targetm.fold_builtin (fndecl, n, argarray, false);
9572 else
9573 return fold_builtin_n (loc, fndecl, argarray, n, false);
9576 return NULL_TREE;
9579 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9580 along with N new arguments specified as the "..." parameters. SKIP
9581 is the number of arguments in EXP to be omitted. This function is used
9582 to do varargs-to-varargs transformations. */
9584 static tree
9585 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9587 va_list ap;
9588 tree t;
9590 va_start (ap, n);
9591 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9592 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9593 va_end (ap);
9595 return t;
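/* Illustrative sketch, not part of the original source: a transform
   that drops the first two arguments of EXP and supplies one fresh
   argument in their place could call

     rewrite_call_expr (loc, exp, 2, fndecl, 1, newarg);

   which builds a call to FNDECL with NEWARG followed by the unchanged
   tail of EXP's argument list.  */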
9598 /* Validate a single argument ARG against a tree code CODE representing
9599 a type. Return true when argument is valid. */
9601 static bool
9602 validate_arg (const_tree arg, enum tree_code code)
9604 if (!arg)
9605 return false;
9606 else if (code == POINTER_TYPE)
9607 return POINTER_TYPE_P (TREE_TYPE (arg));
9608 else if (code == INTEGER_TYPE)
9609 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9610 return code == TREE_CODE (TREE_TYPE (arg));
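/* Illustrative sketch, not part of the original source: the two loose
   cases above mean that

     validate_arg (arg, INTEGER_TYPE)

   also accepts bool and enum arguments (anything INTEGRAL_TYPE_P),
   and POINTER_TYPE accepts reference types as well; every other code
   must match the TREE_CODE of the argument's type exactly.  */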
9613 /* This function validates the types of a function call argument list
9614 against a specified list of tree_codes. If the last specifier is a 0,
9615 that represents an ellipsis, otherwise the last specifier must be a
9616 VOID_TYPE.
9618 This is the GIMPLE version of validate_arglist. Eventually we want to
9619 completely convert builtins.c to work from GIMPLEs and the tree based
9620 validate_arglist will then be removed. */
9622 bool
9623 validate_gimple_arglist (const gcall *call, ...)
9625 enum tree_code code;
9626 bool res = false;
9627 va_list ap;
9628 const_tree arg;
9629 size_t i;
9631 va_start (ap, call);
9632 i = 0;
9634 do
9636 code = (enum tree_code) va_arg (ap, int);
9637 switch (code)
9639 case 0:
9640 /* This signifies an ellipsis; any further arguments are all ok. */
9641 res = true;
9642 goto end;
9643 case VOID_TYPE:
9644 /* This signifies an endlink; if no arguments remain, return
9645 true, otherwise return false. */
9646 res = (i == gimple_call_num_args (call));
9647 goto end;
9648 default:
9649 /* If no parameters remain or the parameter's code does not
9650 match the specified code, return false. Otherwise continue
9651 checking any remaining arguments. */
9652 arg = gimple_call_arg (call, i++);
9653 if (!validate_arg (arg, code))
9654 goto end;
9655 break;
9658 while (1);
9660 /* We need gotos here since we can only have one VA_CLOSE in a
9661 function. */
9662 end: ;
9663 va_end (ap);
9665 return res;
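/* Illustrative sketch, not part of the original source: a typical use
   is

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
				   VOID_TYPE))
       return false;

   which requires exactly one floating-point argument followed by one
   pointer; passing 0 instead of VOID_TYPE as the final specifier
   would instead allow any number of further arguments.  */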
9668 /* Default target-specific builtin expander that does nothing. */
9670 rtx
9671 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9672 rtx target ATTRIBUTE_UNUSED,
9673 rtx subtarget ATTRIBUTE_UNUSED,
9674 machine_mode mode ATTRIBUTE_UNUSED,
9675 int ignore ATTRIBUTE_UNUSED)
9677 return NULL_RTX;
9680 /* Returns true if EXP represents data that would potentially reside
9681 in a readonly section. */
9683 bool
9684 readonly_data_expr (tree exp)
9686 STRIP_NOPS (exp);
9688 if (TREE_CODE (exp) != ADDR_EXPR)
9689 return false;
9691 exp = get_base_address (TREE_OPERAND (exp, 0));
9692 if (!exp)
9693 return false;
9695 /* Make sure we call decl_readonly_section only for trees it
9696 can handle (since it returns true for everything it doesn't
9697 understand). */
9698 if (TREE_CODE (exp) == STRING_CST
9699 || TREE_CODE (exp) == CONSTRUCTOR
9700 || (VAR_P (exp) && TREE_STATIC (exp)))
9701 return decl_readonly_section (exp, 0);
9702 else
9703 return false;
9706 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9707 to the call, and TYPE is its return type.
9709 Return NULL_TREE if no simplification was possible, otherwise return the
9710 simplified form of the call as a tree.
9712 The simplified form may be a constant or other expression which
9713 computes the same value, but in a more efficient manner (including
9714 calls to other builtin functions).
9716 The call may contain arguments which need to be evaluated, but
9717 which are not useful to determine the result of the call. In
9718 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9719 COMPOUND_EXPR will be an argument which must be evaluated.
9720 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9721 COMPOUND_EXPR in the chain will contain the tree for the simplified
9722 form of the builtin function call. */
9724 static tree
9725 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9727 if (!validate_arg (s1, POINTER_TYPE)
9728 || !validate_arg (s2, POINTER_TYPE))
9729 return NULL_TREE;
9730 else
9732 tree fn;
9733 const char *p1, *p2;
9735 p2 = c_getstr (s2);
9736 if (p2 == NULL)
9737 return NULL_TREE;
9739 p1 = c_getstr (s1);
9740 if (p1 != NULL)
9742 const char *r = strpbrk (p1, p2);
9743 tree tem;
9745 if (r == NULL)
9746 return build_int_cst (TREE_TYPE (s1), 0);
9748 /* Return an offset into the constant string argument. */
9749 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9750 return fold_convert_loc (loc, type, tem);
9753 if (p2[0] == '\0')
9754 /* strpbrk(x, "") == NULL.
9755 Evaluate and ignore s1 in case it had side-effects. */
9756 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9758 if (p2[1] != '\0')
9759 return NULL_TREE; /* Really call strpbrk. */
9761 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9762 if (!fn)
9763 return NULL_TREE;
9765 /* New argument list transforming strpbrk(s1, s2) to
9766 strchr(s1, s2[0]). */
9767 return build_call_expr_loc (loc, fn, 2, s1,
9768 build_int_cst (integer_type_node, p2[0]));
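/* Illustrative sketch, not part of the original source: with both
   strings constant, strpbrk ("hello", "lo") folds to the constant
   "hello" + 2; with only the second string known and of length one,
   strpbrk (s, "l") becomes strchr (s, 'l'), and strpbrk (s, "")
   becomes a null pointer that still evaluates S for side effects.  */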
9772 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9773 to the call.
9775 Return NULL_TREE if no simplification was possible, otherwise return the
9776 simplified form of the call as a tree.
9778 The simplified form may be a constant or other expression which
9779 computes the same value, but in a more efficient manner (including
9780 calls to other builtin functions).
9782 The call may contain arguments which need to be evaluated, but
9783 which are not useful to determine the result of the call. In
9784 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9785 COMPOUND_EXPR will be an argument which must be evaluated.
9786 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9787 COMPOUND_EXPR in the chain will contain the tree for the simplified
9788 form of the builtin function call. */
9790 static tree
9791 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9793 if (!validate_arg (s1, POINTER_TYPE)
9794 || !validate_arg (s2, POINTER_TYPE))
9795 return NULL_TREE;
9796 else
9798 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9800 /* If either argument is "", the result is 0. */
9801 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9802 /* Evaluate and ignore both arguments in case either one has
9803 side-effects. */
9804 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9805 s1, s2);
9806 return NULL_TREE;
9810 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9811 to the call.
9813 Return NULL_TREE if no simplification was possible, otherwise return the
9814 simplified form of the call as a tree.
9816 The simplified form may be a constant or other expression which
9817 computes the same value, but in a more efficient manner (including
9818 calls to other builtin functions).
9820 The call may contain arguments which need to be evaluated, but
9821 which are not useful to determine the result of the call. In
9822 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9823 COMPOUND_EXPR will be an argument which must be evaluated.
9824 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9825 COMPOUND_EXPR in the chain will contain the tree for the simplified
9826 form of the builtin function call. */
9828 static tree
9829 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9831 if (!validate_arg (s1, POINTER_TYPE)
9832 || !validate_arg (s2, POINTER_TYPE))
9833 return NULL_TREE;
9834 else
9836 /* If the first argument is "", the result is 0. */
9837 const char *p1 = c_getstr (s1);
9838 if (p1 && *p1 == '\0')
9840 /* Evaluate and ignore argument s2 in case it has
9841 side-effects. */
9842 return omit_one_operand_loc (loc, size_type_node,
9843 size_zero_node, s2);
9846 /* If the second argument is "", return __builtin_strlen(s1). */
9847 const char *p2 = c_getstr (s2);
9848 if (p2 && *p2 == '\0')
9850 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9852 /* If the replacement _DECL isn't initialized, don't do the
9853 transformation. */
9854 if (!fn)
9855 return NULL_TREE;
9857 return build_call_expr_loc (loc, fn, 1, s1);
9859 return NULL_TREE;
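/* Illustrative sketch, not part of the original source: taken
   together, the two folders above give

     strspn (s, "")    ->  0, with both arguments still evaluated
     strcspn ("", s)   ->  0, with S still evaluated
     strcspn (s, "")   ->  strlen (s)

   where the omit_*_operand_loc calls preserve any side effects of the
   discarded arguments.  */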
9863 /* Fold the next_arg or va_start call EXP. Returns true if an error
9864 was produced, false otherwise. This is done so that we don't output the error
9865 or warning twice or three times. */
9867 bool
9868 fold_builtin_next_arg (tree exp, bool va_start_p)
9870 tree fntype = TREE_TYPE (current_function_decl);
9871 int nargs = call_expr_nargs (exp);
9872 tree arg;
9873 /* There is a good chance the current input_location points inside the
9874 definition of the va_start macro (perhaps on the token for the
9875 builtin) in a system header, so warnings will not be emitted.
9876 Use the location in real source code. */
9877 source_location current_location =
9878 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9879 NULL);
9881 if (!stdarg_p (fntype))
9883 error ("%<va_start%> used in function with fixed args");
9884 return true;
9887 if (va_start_p)
9889 if (nargs != 2)
9891 error ("wrong number of arguments to function %<va_start%>");
9892 return true;
9894 arg = CALL_EXPR_ARG (exp, 1);
9896 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9897 once we have checked the arguments and, if needed, issued a warning. */
9898 else
9900 if (nargs == 0)
9902 /* Evidently an out of date version of <stdarg.h>; can't validate
9903 va_start's second argument, but can still work as intended. */
9904 warning_at (current_location,
9905 OPT_Wvarargs,
9906 "%<__builtin_next_arg%> called without an argument");
9907 return true;
9909 else if (nargs > 1)
9911 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9912 return true;
9914 arg = CALL_EXPR_ARG (exp, 0);
9917 if (TREE_CODE (arg) == SSA_NAME)
9918 arg = SSA_NAME_VAR (arg);
9920 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9921 or __builtin_next_arg (0) the first time we see it, after checking
9922 the arguments and if needed issuing a warning. */
9923 if (!integer_zerop (arg))
9925 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9927 /* Strip off all nops for the sake of the comparison. This
9928 is not quite the same as STRIP_NOPS. It does more.
9929 We must also strip off INDIRECT_EXPR for C++ reference
9930 parameters. */
9931 while (CONVERT_EXPR_P (arg)
9932 || TREE_CODE (arg) == INDIRECT_REF)
9933 arg = TREE_OPERAND (arg, 0);
9934 if (arg != last_parm)
9936 /* FIXME: Sometimes the tree optimizers leave us with something
9937 other than the last argument even though the user used the
9938 last argument. We just warn and set the arg to be the last
9939 argument so that we will get wrong code because of
9940 it. */
9941 warning_at (current_location,
9942 OPT_Wvarargs,
9943 "second parameter of %<va_start%> not last named argument");
9946 /* Undefined by C99 7.15.1.4p4 (va_start):
9947 "If the parameter parmN is declared with the register storage
9948 class, with a function or array type, or with a type that is
9949 not compatible with the type that results after application of
9950 the default argument promotions, the behavior is undefined." */
9952 else if (DECL_REGISTER (arg))
9954 warning_at (current_location,
9955 OPT_Wvarargs,
9956 "undefined behavior when second parameter of "
9957 "%<va_start%> is declared with %<register%> storage");
9960 /* We want to verify the second parameter just once before the tree
9961 optimizers are run and then avoid keeping it in the tree,
9962 as otherwise we could warn even for correct code like:
9963 void foo (int i, ...)
9964 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9965 if (va_start_p)
9966 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9967 else
9968 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9970 return false;
9974 /* Expand a call EXP to __builtin_object_size. */
9976 static rtx
9977 expand_builtin_object_size (tree exp)
9979 tree ost;
9980 int object_size_type;
9981 tree fndecl = get_callee_fndecl (exp);
9983 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9985 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9986 exp, fndecl);
9987 expand_builtin_trap ();
9988 return const0_rtx;
9991 ost = CALL_EXPR_ARG (exp, 1);
9992 STRIP_NOPS (ost);
9994 if (TREE_CODE (ost) != INTEGER_CST
9995 || tree_int_cst_sgn (ost) < 0
9996 || compare_tree_int (ost, 3) > 0)
9998 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9999 exp, fndecl);
10000 expand_builtin_trap ();
10001 return const0_rtx;
10004 object_size_type = tree_to_shwi (ost);
10006 return object_size_type < 2 ? constm1_rtx : const0_rtx;
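/* Illustrative sketch, not part of the original source: a call that
   survives to expansion time because the object size is unknown, e.g.

     size_t n = __builtin_object_size (p, 0);

   expands to the documented defaults: (size_t) -1 for types 0 and 1,
   and (size_t) 0 for types 2 and 3.  */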
10009 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10010 FCODE is the BUILT_IN_* to use.
10011 Return NULL_RTX if we failed; the caller should emit a normal call,
10012 otherwise try to get the result in TARGET, if convenient (and in
10013 mode MODE if that's convenient). */
10015 static rtx
10016 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10017 enum built_in_function fcode)
10019 if (!validate_arglist (exp,
10020 POINTER_TYPE,
10021 fcode == BUILT_IN_MEMSET_CHK
10022 ? INTEGER_TYPE : POINTER_TYPE,
10023 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10024 return NULL_RTX;
10026 tree dest = CALL_EXPR_ARG (exp, 0);
10027 tree src = CALL_EXPR_ARG (exp, 1);
10028 tree len = CALL_EXPR_ARG (exp, 2);
10029 tree size = CALL_EXPR_ARG (exp, 3);
10031 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10032 /*str=*/NULL_TREE, size);
10034 if (!tree_fits_uhwi_p (size))
10035 return NULL_RTX;
10037 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10039 /* Avoid transforming the checking call to an ordinary one when
10040 an overflow has been detected or when the call couldn't be
10041 validated because the size is not constant. */
10042 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10043 return NULL_RTX;
10045 tree fn = NULL_TREE;
10046 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10047 mem{cpy,pcpy,move,set} is available. */
10048 switch (fcode)
10050 case BUILT_IN_MEMCPY_CHK:
10051 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10052 break;
10053 case BUILT_IN_MEMPCPY_CHK:
10054 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10055 break;
10056 case BUILT_IN_MEMMOVE_CHK:
10057 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10058 break;
10059 case BUILT_IN_MEMSET_CHK:
10060 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10061 break;
10062 default:
10063 break;
10066 if (! fn)
10067 return NULL_RTX;
10069 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10070 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10071 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10072 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10074 else if (fcode == BUILT_IN_MEMSET_CHK)
10075 return NULL_RTX;
10076 else
10078 unsigned int dest_align = get_pointer_alignment (dest);
10080 /* If DEST is not a pointer type, call the normal function. */
10081 if (dest_align == 0)
10082 return NULL_RTX;
10084 /* If SRC and DEST are the same (and not volatile), do nothing. */
10085 if (operand_equal_p (src, dest, 0))
10087 tree expr;
10089 if (fcode != BUILT_IN_MEMPCPY_CHK)
10091 /* Evaluate and ignore LEN in case it has side-effects. */
10092 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10093 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10096 expr = fold_build_pointer_plus (dest, len);
10097 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10100 /* __memmove_chk special case. */
10101 if (fcode == BUILT_IN_MEMMOVE_CHK)
10103 unsigned int src_align = get_pointer_alignment (src);
10105 if (src_align == 0)
10106 return NULL_RTX;
10108 /* If src is categorized for a readonly section, we can use
10109 normal __memcpy_chk. */
10110 if (readonly_data_expr (src))
10112 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10113 if (!fn)
10114 return NULL_RTX;
10115 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10116 dest, src, len, size);
10117 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10118 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10119 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10122 return NULL_RTX;
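/* Illustrative sketch, not part of the original source: when the
   length is a known constant that fits the destination, e.g.

     __builtin___memcpy_chk (d, s, 16, __builtin_object_size (d, 0));

   is expanded as the ordinary memcpy (d, s, 16), inheriting the
   tail-call flag of the checked call.  */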
10126 /* Emit warning if a buffer overflow is detected at compile time. */
10128 static void
10129 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10131 /* The source string. */
10132 tree srcstr = NULL_TREE;
10133 /* The size of the destination object. */
10134 tree objsize = NULL_TREE;
10135 /* The string the source is being appended to (as in __strcat_chk),
10136 or null if the call does not concatenate. */
10137 tree catstr = NULL_TREE;
10138 /* The maximum length of the source sequence in a bounded operation
10139 (such as __strncat_chk) or null if the operation isn't bounded
10140 (such as __strcat_chk). */
10141 tree maxread = NULL_TREE;
10142 /* The exact size of the access (such as in __strncpy_chk). */
10143 tree size = NULL_TREE;
10145 switch (fcode)
10147 case BUILT_IN_STRCPY_CHK:
10148 case BUILT_IN_STPCPY_CHK:
10149 srcstr = CALL_EXPR_ARG (exp, 1);
10150 objsize = CALL_EXPR_ARG (exp, 2);
10151 break;
10153 case BUILT_IN_STRCAT_CHK:
10154 /* For __strcat_chk the warning will be emitted only if overflowing
10155 by at least strlen (dest) + 1 bytes. */
10156 catstr = CALL_EXPR_ARG (exp, 0);
10157 srcstr = CALL_EXPR_ARG (exp, 1);
10158 objsize = CALL_EXPR_ARG (exp, 2);
10159 break;
10161 case BUILT_IN_STRNCAT_CHK:
10162 catstr = CALL_EXPR_ARG (exp, 0);
10163 srcstr = CALL_EXPR_ARG (exp, 1);
10164 maxread = CALL_EXPR_ARG (exp, 2);
10165 objsize = CALL_EXPR_ARG (exp, 3);
10166 break;
10168 case BUILT_IN_STRNCPY_CHK:
10169 case BUILT_IN_STPNCPY_CHK:
10170 srcstr = CALL_EXPR_ARG (exp, 1);
10171 size = CALL_EXPR_ARG (exp, 2);
10172 objsize = CALL_EXPR_ARG (exp, 3);
10173 break;
10175 case BUILT_IN_SNPRINTF_CHK:
10176 case BUILT_IN_VSNPRINTF_CHK:
10177 maxread = CALL_EXPR_ARG (exp, 1);
10178 objsize = CALL_EXPR_ARG (exp, 3);
10179 break;
10180 default:
10181 gcc_unreachable ();
10184 if (catstr && maxread)
10186 /* Check __strncat_chk. There is no way to determine the length
10187 of the string to which the source string is being appended so
10188 just warn when the length of the source string is not known. */
10189 check_strncat_sizes (exp, objsize);
10190 return;
10193 /* The destination argument is the first one for all built-ins above. */
10194 tree dst = CALL_EXPR_ARG (exp, 0);
10196 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10199 /* Emit warning if a buffer overflow is detected at compile time
10200 in __sprintf_chk/__vsprintf_chk calls. */
10202 static void
10203 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10205 tree size, len, fmt;
10206 const char *fmt_str;
10207 int nargs = call_expr_nargs (exp);
10209 /* Verify the required arguments in the original call. */
10211 if (nargs < 4)
10212 return;
10213 size = CALL_EXPR_ARG (exp, 2);
10214 fmt = CALL_EXPR_ARG (exp, 3);
10216 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10217 return;
10219 /* Check whether the format is a literal string constant. */
10220 fmt_str = c_getstr (fmt);
10221 if (fmt_str == NULL)
10222 return;
10224 if (!init_target_chars ())
10225 return;
10227 /* If the format doesn't contain % args or %%, we know its size. */
10228 if (strchr (fmt_str, target_percent) == 0)
10229 len = build_int_cstu (size_type_node, strlen (fmt_str));
10230 /* If the format is "%s" and the first ... argument is a string literal,
10231 we know it too. */
10232 else if (fcode == BUILT_IN_SPRINTF_CHK
10233 && strcmp (fmt_str, target_percent_s) == 0)
10235 tree arg;
10237 if (nargs < 5)
10238 return;
10239 arg = CALL_EXPR_ARG (exp, 4);
10240 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10241 return;
10243 len = c_strlen (arg, 1);
10244 if (!len || ! tree_fits_uhwi_p (len))
10245 return;
10247 else
10248 return;
10250 /* Add one for the terminating nul. */
10251 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10253 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10254 /*maxread=*/NULL_TREE, len, size);
10257 /* Emit warning if free is called with the address of a variable. */
10259 static void
10260 maybe_emit_free_warning (tree exp)
10262 tree arg = CALL_EXPR_ARG (exp, 0);
10264 STRIP_NOPS (arg);
10265 if (TREE_CODE (arg) != ADDR_EXPR)
10266 return;
10268 arg = get_base_address (TREE_OPERAND (arg, 0));
10269 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10270 return;
10272 if (SSA_VAR_P (arg))
10273 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10274 "%Kattempt to free a non-heap object %qD", exp, arg);
10275 else
10276 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10277 "%Kattempt to free a non-heap object", exp);
10280 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10281 if possible. */
10283 static tree
10284 fold_builtin_object_size (tree ptr, tree ost)
10286 unsigned HOST_WIDE_INT bytes;
10287 int object_size_type;
10289 if (!validate_arg (ptr, POINTER_TYPE)
10290 || !validate_arg (ost, INTEGER_TYPE))
10291 return NULL_TREE;
10293 STRIP_NOPS (ost);
10295 if (TREE_CODE (ost) != INTEGER_CST
10296 || tree_int_cst_sgn (ost) < 0
10297 || compare_tree_int (ost, 3) > 0)
10298 return NULL_TREE;
10300 object_size_type = tree_to_shwi (ost);
10302 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10303 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10304 and (size_t) 0 for types 2 and 3. */
10305 if (TREE_SIDE_EFFECTS (ptr))
10306 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10308 if (TREE_CODE (ptr) == ADDR_EXPR)
10310 compute_builtin_object_size (ptr, object_size_type, &bytes);
10311 if (wi::fits_to_tree_p (bytes, size_type_node))
10312 return build_int_cstu (size_type_node, bytes);
10314 else if (TREE_CODE (ptr) == SSA_NAME)
10316 /* If object size is not known yet, delay folding until
10317 later. Maybe subsequent passes will help determining
10318 it. */
10319 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10320 && wi::fits_to_tree_p (bytes, size_type_node))
10321 return build_int_cstu (size_type_node, bytes);
10324 return NULL_TREE;
10327 /* Builtins with folding operations that operate on "..." arguments
10328 need special handling; we need to store the arguments in a convenient
10329 data structure before attempting any folding. Fortunately there are
10330 only a few builtins that fall into this category. FNDECL is the
10331 function, EXP is the CALL_EXPR for the call. */
10333 static tree
10334 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10336 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10337 tree ret = NULL_TREE;
10339 switch (fcode)
10341 case BUILT_IN_FPCLASSIFY:
10342 ret = fold_builtin_fpclassify (loc, args, nargs);
10343 break;
10345 default:
10346 break;
10348 if (ret)
10350 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10351 SET_EXPR_LOCATION (ret, loc);
10352 TREE_NO_WARNING (ret) = 1;
10353 return ret;
10355 return NULL_TREE;
10358 /* Initialize format string characters in the target charset. */
10360 bool
10361 init_target_chars (void)
10363 static bool init;
10364 if (!init)
10366 target_newline = lang_hooks.to_target_charset ('\n');
10367 target_percent = lang_hooks.to_target_charset ('%');
10368 target_c = lang_hooks.to_target_charset ('c');
10369 target_s = lang_hooks.to_target_charset ('s');
10370 if (target_newline == 0 || target_percent == 0 || target_c == 0
10371 || target_s == 0)
10372 return false;
10374 target_percent_c[0] = target_percent;
10375 target_percent_c[1] = target_c;
10376 target_percent_c[2] = '\0';
10378 target_percent_s[0] = target_percent;
10379 target_percent_s[1] = target_s;
10380 target_percent_s[2] = '\0';
10382 target_percent_s_newline[0] = target_percent;
10383 target_percent_s_newline[1] = target_s;
10384 target_percent_s_newline[2] = target_newline;
10385 target_percent_s_newline[3] = '\0';
10387 init = true;
10389 return true;
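/* Illustrative sketch, not part of the original source: after a
   successful init_target_chars (), format analysis compares against
   the target character set rather than host literals, e.g.

     if (strcmp (fmt_str, target_percent_s) == 0)
       ... the format is exactly "%s" in the target charset ...

   which stays correct when host and target charsets differ.  */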
10392 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10393 and no overflow/underflow occurred. INEXACT is true if M was not
10394 exactly calculated. TYPE is the tree type for the result. This
10395 function assumes that you cleared the MPFR flags and then
10396 calculated M to see if anything subsequently set a flag prior to
10397 entering this function. Return NULL_TREE if any checks fail. */
10399 static tree
10400 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10402 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10403 overflow/underflow occurred. If -frounding-math, proceed iff the
10404 result of calling FUNC was exact. */
10405 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10406 && (!flag_rounding_math || !inexact))
10408 REAL_VALUE_TYPE rr;
10410 real_from_mpfr (&rr, m, type, GMP_RNDN);
10411 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10412 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10413 but the mpfr_t is not, then we underflowed in the
10414 conversion. */
10415 if (real_isfinite (&rr)
10416 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10418 REAL_VALUE_TYPE rmode;
10420 real_convert (&rmode, TYPE_MODE (type), &rr);
10421 /* Proceed iff the specified mode can hold the value. */
10422 if (real_identical (&rmode, &rr))
10423 return build_real (type, rmode);
10426 return NULL_TREE;
10429 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10430 number and no overflow/underflow occurred. INEXACT is true if M
10431 was not exactly calculated. TYPE is the tree type for the result.
10432 This function assumes that you cleared the MPFR flags and then
10433 calculated M to see if anything subsequently set a flag prior to
10434 entering this function. Return NULL_TREE if any checks fail, if
10435 FORCE_CONVERT is true, then bypass the checks. */
10437 static tree
10438 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10440 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10441 overflow/underflow occurred. If -frounding-math, proceed iff the
10442 result of calling FUNC was exact. */
10443 if (force_convert
10444 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10445 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10446 && (!flag_rounding_math || !inexact)))
10448 REAL_VALUE_TYPE re, im;
10450 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10451 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
10452 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10453 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10454 but the mpfr_t is not, then we underflowed in the
10455 conversion. */
10456 if (force_convert
10457 || (real_isfinite (&re) && real_isfinite (&im)
10458 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10459 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10461 REAL_VALUE_TYPE re_mode, im_mode;
10463 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10464 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10465 /* Proceed iff the specified mode can hold the value. */
10466 if (force_convert
10467 || (real_identical (&re_mode, &re)
10468 && real_identical (&im_mode, &im)))
10469 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10470 build_real (TREE_TYPE (type), im_mode));
10473 return NULL_TREE;
10476 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10477 the value pointed to by ARG_QUO and return the remainder. The type is taken
10478 from the type of ARG0 and is used for setting the precision of the
10479 calculation and results. */
10481 static tree
10482 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10484 tree const type = TREE_TYPE (arg0);
10485 tree result = NULL_TREE;
10487 STRIP_NOPS (arg0);
10488 STRIP_NOPS (arg1);
10490 /* To proceed, MPFR must exactly represent the target floating point
10491 format, which only happens when the target base equals two. */
10492 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10493 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10494 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10496 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10497 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10499 if (real_isfinite (ra0) && real_isfinite (ra1))
10501 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10502 const int prec = fmt->p;
10503 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10504 tree result_rem;
10505 long integer_quo;
10506 mpfr_t m0, m1;
10508 mpfr_inits2 (prec, m0, m1, NULL);
10509 mpfr_from_real (m0, ra0, GMP_RNDN);
10510 mpfr_from_real (m1, ra1, GMP_RNDN);
10511 mpfr_clear_flags ();
10512 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10513 /* Remquo is independent of the rounding mode, so pass
10514 inexact=0 to do_mpfr_ckconv(). */
10515 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10516 mpfr_clears (m0, m1, NULL);
10517 if (result_rem)
10519 /* MPFR calculates quo in the host's long so it may
10520 return more bits in quo than the target int can hold
10521 if sizeof(host long) > sizeof(target int). This can
10522 happen even for native compilers in LP64 mode. In
10523 these cases, reduce the quo value modulo the largest
10524 number that the target int can hold, leaving one
10525 bit for the sign. */
10526 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10527 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10529 /* Dereference the quo pointer argument. */
10530 arg_quo = build_fold_indirect_ref (arg_quo);
10531 /* Proceed iff a valid pointer type was passed in. */
10532 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10534 /* Set the value. */
10535 tree result_quo
10536 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10537 build_int_cst (TREE_TYPE (arg_quo),
10538 integer_quo));
10539 TREE_SIDE_EFFECTS (result_quo) = 1;
10540 /* Combine the quo assignment with the rem. */
10541 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10542 result_quo, result_rem));
10547 return result;
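/* Illustrative sketch, not part of the original source: with constant
   operands, remquo (5.0, 3.0, &q) is folded by the code above into
   storing 2 through the quo pointer and yielding the remainder -1.0,
   combined as a MODIFY_EXPR chained into a COMPOUND_EXPR so the store
   is kept as a side effect of the returned expression.  */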
10550 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10551 resulting value as a tree with type TYPE. The mpfr precision is
10552 set to the precision of TYPE. We assume that this mpfr function
10553 returns zero if the result could be calculated exactly within the
10554 requested precision. In addition, the integer pointer represented
10555 by ARG_SG will be dereferenced and set to the appropriate signgam
10556 (-1,1) value. */
10558 static tree
10559 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10561 tree result = NULL_TREE;
10563 STRIP_NOPS (arg);
10565 /* To proceed, MPFR must exactly represent the target floating point
10566 format, which only happens when the target base equals two. Also
10567 verify ARG is a constant and that ARG_SG is an int pointer. */
10568 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10569 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10570 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10571 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10573 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10575 /* In addition to NaN and Inf, the argument cannot be zero or a
10576 negative integer. */
10577 if (real_isfinite (ra)
10578 && ra->cl != rvc_zero
10579 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10581 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10582 const int prec = fmt->p;
10583 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
10584 int inexact, sg;
10585 mpfr_t m;
10586 tree result_lg;
10588 mpfr_init2 (m, prec);
10589 mpfr_from_real (m, ra, GMP_RNDN);
10590 mpfr_clear_flags ();
10591 inexact = mpfr_lgamma (m, &sg, m, rnd);
10592 result_lg = do_mpfr_ckconv (m, type, inexact);
10593 mpfr_clear (m);
10594 if (result_lg)
10596 tree result_sg;
10598 /* Dereference the arg_sg pointer argument. */
10599 arg_sg = build_fold_indirect_ref (arg_sg);
10600 /* Assign the signgam value into *arg_sg. */
10601 result_sg = fold_build2 (MODIFY_EXPR,
10602 TREE_TYPE (arg_sg), arg_sg,
10603 build_int_cst (TREE_TYPE (arg_sg), sg));
10604 TREE_SIDE_EFFECTS (result_sg) = 1;
10605 /* Combine the signgam assignment with the lgamma result. */
10606 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10607 result_sg, result_lg));
10612 return result;
10615 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10616 mpc function FUNC on it and return the resulting value as a tree
10617 with type TYPE. The mpfr precision is set to the precision of
10618 TYPE. We assume that function FUNC returns zero if the result
10619 could be calculated exactly within the requested precision. If
10620 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10621 in the arguments and/or results. */
tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
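
/* Usage sketch (illustrative, not code from this file): any MPC binary
   entry point with the matching signature can be passed as FUNC.  For
   instance, a constant-folding caller could fold __builtin_cpow with:

     tree res = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);
     if (res)
       return res;

   Here 0 is the DO_NONFINITE argument, and mpc_pow has the required
   type int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t); it returns
   zero when the computed result is exact, as assumed above.  */
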
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */
tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
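
/* Illustrative caller (hypothetical): a pass walking GIMPLE might try

     if (gcall *call = dyn_cast <gcall *> (gsi_stmt (gsi)))
       if (tree folded = fold_call_stmt (call, true))
	 ... replace the call with FOLDED ...

   where the second argument (IGNORE) says the call's value is unused,
   permitting more aggressive folding.  */
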
/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */
void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
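
/* Example (illustrative): this hook matters when a user renames a
   builtin at the assembler level, e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   On targets where int is narrower than a word, ffs may also be
   expanded through ffs_optab's library call, so that libfunc has to be
   renamed to the user name as well, which is what the code above does.
   The name "my_ffs" is hypothetical.  */
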
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
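
/* Usage note (illustrative, hypothetical names): these two predicates
   are cheapness heuristics; a cost model might use them as

     if (is_inexpensive_builtin (gimple_call_fndecl (stmt)))
       cost = 1;

   i.e., treating such calls as roughly one instruction rather than a
   full call.  */
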
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */
bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
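
/* Illustrative use (not from this file): folders of string builtins
   that need a constant search character can do

     char c;
     if (target_char_cst_p (arg1, &c))
       ... fold using the host char C ...

   and bail out on targets whose char width differs from the host's.  */
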
/* Return the maximum object size.  */
tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}
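
/* Usage sketch (illustrative): size-checking warnings can compare a
   size argument against this limit, e.g.

     tree maxobjsize = max_object_size ();
     if (tree_int_cst_lt (maxobjsize, size))
       warn about an impossibly large object

   using tree_int_cst_lt, the usual INTEGER_CST comparison helper.  */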